CombinedText stringlengths 4 3.42M |
|---|
//! Type aliases to C types like c_int for use with bindgen
#![allow(non_camel_case_types)]
#![deny(warnings)]
#![no_std]
// AD = Architecture dependent
pub use ad::*;
// OD = OS dependent
pub use od::*;
// PWD = Pointer Width Dependent
pub use pwd::*;
// `c_char` is unsigned on these architectures; widths presumably follow the
// libc crate's per-target tables — TODO confirm against libc when updating.
#[cfg(any(target_arch = "aarch64",
          target_arch = "arm",
          target_arch = "asmjs",
          target_arch = "wasm32",
          target_arch = "wasm64",
          target_arch = "powerpc",
          target_arch = "powerpc64",
          target_arch = "s390x"))]
mod ad {
    pub type c_char = ::c_uchar;
    pub type c_int = i32;
    pub type c_uint = u32;
}

// `c_char` is signed on these architectures.
#[cfg(any(target_arch = "mips",
          target_arch = "mips64",
          target_arch = "sparc64",
          target_arch = "x86",
          target_arch = "x86_64",
          target_arch = "nvptx",
          target_arch = "nvptx64"))]
mod ad {
    pub type c_char = ::c_schar;
    pub type c_int = i32;
    pub type c_uint = u32;
}

// MSP430 is a 16-bit target: C `int` is only 16 bits wide there.
#[cfg(target_arch = "msp430")]
mod ad {
    pub type c_char = ::c_uchar;
    pub type c_int = i16;
    pub type c_uint = u16;
}
// NOTE c_{,u}long definitions come from libc v0.2.3
// Default case: C `long` follows the target pointer width
// (32-bit on 16/32-bit targets, 64-bit on 64-bit targets).
#[cfg(not(any(windows,
              target_os = "redox",
              target_os = "solaris")))]
mod od {
    #[cfg(any(target_pointer_width = "16",
              target_pointer_width = "32"))]
    pub type c_long = i32;
    #[cfg(any(target_pointer_width = "16",
              target_pointer_width = "32"))]
    pub type c_ulong = u32;
    #[cfg(target_pointer_width = "64")]
    pub type c_long = i64;
    #[cfg(target_pointer_width = "64")]
    pub type c_ulong = u64;
}

// Windows keeps `long` at 32 bits even on 64-bit targets (LLP64 model).
#[cfg(windows)]
mod od {
    pub type c_long = i32;
    pub type c_ulong = u32;
}

// Redox and Solaris use a 64-bit `long` unconditionally.
#[cfg(any(target_os = "redox",
          target_os = "solaris"))]
mod od {
    pub type c_long = i64;
    pub type c_ulong = u64;
}
// Pointer-width-dependent definitions. Both variants are currently empty;
// the module exists (and is re-exported above) only as a placeholder for
// future pointer-width-specific aliases.
#[cfg(target_pointer_width = "32")]
mod pwd {}
#[cfg(target_pointer_width = "64")]
mod pwd {}
// Fixed-width C99 `stdint.h` integer types.
pub type int8_t = i8;
pub type int16_t = i16;
pub type int32_t = i32;
pub type int64_t = i64;
pub type uint8_t = u8;
pub type uint16_t = u16;
pub type uint32_t = u32;
pub type uint64_t = u64;
// C integer types whose width is the same on every supported target; the
// architecture/OS-dependent ones (`c_char`, `c_int`, `c_long`, ...) come
// from the `ad`/`od` modules above.
pub type c_schar = i8;
pub type c_short = i16;
pub type c_longlong = i64;
pub type c_uchar = u8;
pub type c_ushort = u16;
pub type c_ulonglong = u64;
// NOTE from libc v0.2.23
// Use repr(u8) as LLVM expects `void*` to be the same as `i8*` to help enable
// more optimization opportunities around it recognizing things like
// malloc/free.
/// Equivalent of C's `void` when used as a pointer target (`*mut c_void`).
/// Uninhabitable by design: it cannot be constructed, only pointed at.
#[repr(u8)]
pub enum c_void {
    // Two dummy variants so the #[repr] attribute can be used.
    #[doc(hidden)]
    __variant1,
    #[doc(hidden)]
    __variant2,
}
Auto merge of #9 - Amanieu:patch-1, r=japaric
Include additional types from libc
Copied a few extra definitions from the libc crate.
//! Type aliases to C types like c_int for use with bindgen
#![allow(non_camel_case_types)]
#![deny(warnings)]
#![no_std]
// AD = Architecture dependent
pub use ad::*;
// OD = OS dependent
pub use od::*;
// PWD = Pointer Width Dependent
pub use pwd::*;
// `c_char` is unsigned on these architectures; widths presumably follow the
// libc crate's per-target tables — TODO confirm against libc when updating.
#[cfg(any(target_arch = "aarch64",
          target_arch = "arm",
          target_arch = "asmjs",
          target_arch = "wasm32",
          target_arch = "wasm64",
          target_arch = "powerpc",
          target_arch = "powerpc64",
          target_arch = "s390x"))]
mod ad {
    pub type c_char = ::c_uchar;
    pub type c_int = i32;
    pub type c_uint = u32;
}

// `c_char` is signed on these architectures.
#[cfg(any(target_arch = "mips",
          target_arch = "mips64",
          target_arch = "sparc64",
          target_arch = "x86",
          target_arch = "x86_64",
          target_arch = "nvptx",
          target_arch = "nvptx64"))]
mod ad {
    pub type c_char = ::c_schar;
    pub type c_int = i32;
    pub type c_uint = u32;
}

// MSP430 is a 16-bit target: C `int` is only 16 bits wide there.
#[cfg(target_arch = "msp430")]
mod ad {
    pub type c_char = ::c_uchar;
    pub type c_int = i16;
    pub type c_uint = u16;
}
// NOTE c_{,u}long definitions come from libc v0.2.3
// Default case: C `long` follows the target pointer width
// (32-bit on 16/32-bit targets, 64-bit on 64-bit targets).
#[cfg(not(any(windows,
              target_os = "redox",
              target_os = "solaris")))]
mod od {
    #[cfg(any(target_pointer_width = "16",
              target_pointer_width = "32"))]
    pub type c_long = i32;
    #[cfg(any(target_pointer_width = "16",
              target_pointer_width = "32"))]
    pub type c_ulong = u32;
    #[cfg(target_pointer_width = "64")]
    pub type c_long = i64;
    #[cfg(target_pointer_width = "64")]
    pub type c_ulong = u64;
}

// Windows keeps `long` at 32 bits even on 64-bit targets (LLP64 model).
#[cfg(windows)]
mod od {
    pub type c_long = i32;
    pub type c_ulong = u32;
}

// Redox and Solaris use a 64-bit `long` unconditionally.
#[cfg(any(target_os = "redox",
          target_os = "solaris"))]
mod od {
    pub type c_long = i64;
    pub type c_ulong = u64;
}
// Pointer-width-dependent definitions. Both variants are currently empty;
// the pointer-sized aliases below (`size_t`, `intptr_t`, ...) are expressed
// directly via Rust's `usize`/`isize`, so nothing lives here yet.
#[cfg(target_pointer_width = "32")]
mod pwd {}
#[cfg(target_pointer_width = "64")]
mod pwd {}
// Fixed-width C99 `stdint.h` integer types.
pub type int8_t = i8;
pub type int16_t = i16;
pub type int32_t = i32;
pub type int64_t = i64;
pub type uint8_t = u8;
pub type uint16_t = u16;
pub type uint32_t = u32;
pub type uint64_t = u64;
// C integer types whose width is the same on every supported target; the
// architecture/OS-dependent ones (`c_char`, `c_int`, `c_long`, ...) come
// from the `ad`/`od` modules above.
pub type c_schar = i8;
pub type c_short = i16;
pub type c_longlong = i64;
pub type c_uchar = u8;
pub type c_ushort = u16;
pub type c_ulonglong = u64;
// C floating-point types.
pub type c_float = f32;
pub type c_double = f64;
// Widest integer types; 64-bit on all supported targets.
pub type intmax_t = i64;
pub type uintmax_t = u64;
// Pointer-sized C types map directly onto Rust's `usize`/`isize`.
pub type size_t = usize;
pub type ptrdiff_t = isize;
pub type intptr_t = isize;
pub type uintptr_t = usize;
pub type ssize_t = isize;
// NOTE from libc v0.2.23
// Use repr(u8) as LLVM expects `void*` to be the same as `i8*` to help enable
// more optimization opportunities around it recognizing things like
// malloc/free.
/// Equivalent of C's `void` when used as a pointer target (`*mut c_void`).
/// Uninhabitable by design: it cannot be constructed, only pointed at.
#[repr(u8)]
pub enum c_void {
    // Two dummy variants so the #[repr] attribute can be used.
    #[doc(hidden)]
    __variant1,
    #[doc(hidden)]
    __variant2,
}
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Support for matching file paths against Unix shell style patterns.
//!
//! The `glob` and `glob_with` functions, in concert with the `Paths`
//! type, allow querying the filesystem for all files that match a particular
//! pattern - just like the libc `glob` function (for an example see the `glob`
//! documentation). The methods on the `Pattern` type provide functionality
//! for checking if individual paths match a particular pattern - in a similar
//! manner to the libc `fnmatch` function.
//! For consistency across platforms, and for Windows support, this module
//! is implemented entirely in Rust rather than deferring to the libc
//! `glob`/`fnmatch` functions.
#![feature(associated_types)]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/glob/")]
use std::ascii::AsciiExt;
use std::cell::Cell;
use std::{cmp, os, path};
use std::io::fs::{self, PathExtensions};
use std::path::is_sep;
use std::string::String;
use PatternToken::{Char, AnyChar, AnySequence, AnyRecursiveSequence, AnyWithin, AnyExcept};
use CharSpecifier::{SingleChar, CharRange};
use MatchResult::{Match, SubPatternDoesntMatch, EntirePatternDoesntMatch};
/// An iterator that yields Paths from the filesystem that match a particular
/// pattern - see the `glob` function for more details.
pub struct Paths {
    // One compiled `Pattern` per path component of the original glob string.
    dir_patterns: Vec<Pattern>,
    // True when the pattern ended in a separator: only directories match.
    require_dir: bool,
    // Options forwarded to `Pattern::matches_with` for every component.
    options: MatchOptions,
    // Work stack of (path, index into `dir_patterns`) pairs still to examine.
    // The sentinel index `-1 as uint` marks a path already fully accepted by
    // `fill_todo`.
    todo: Vec<(Path,uint)>,
}
/// Return an iterator that produces all the Paths that match the given pattern,
/// which may be absolute or relative to the current working directory.
///
/// This method uses the default match options and is equivalent to calling
/// `glob_with(pattern, MatchOptions::new())`. Use `glob_with` directly if you
/// want to use non-default match options.
///
/// # Example
///
/// Consider a directory `/media/pictures` containing only the files `kittens.jpg`,
/// `puppies.jpg` and `hamsters.gif`:
///
/// ```rust
/// use glob::glob;
///
/// for path in glob("/media/pictures/*.jpg") {
///     println!("{}", path.display());
/// }
/// ```
///
/// The above code will print:
///
/// ```ignore
/// /media/pictures/kittens.jpg
/// /media/pictures/puppies.jpg
/// ```
///
pub fn glob(pattern: &str) -> Paths {
    // Thin convenience wrapper: delegate with the default `MatchOptions`.
    glob_with(pattern, &MatchOptions::new())
}
/// Return an iterator that produces all the Paths that match the given pattern,
/// which may be absolute or relative to the current working directory.
///
/// This function accepts Unix shell style patterns as described by `Pattern::new(..)`.
/// The options given are passed through unchanged to `Pattern::matches_with(..)` with
/// the exception that `require_literal_separator` is always set to `true` regardless of the
/// value passed to this function.
///
/// Paths are yielded in alphabetical order, as absolute paths.
pub fn glob_with(pattern: &str, options: &MatchOptions) -> Paths {
    // Windows verbatim (`\\?\`) paths bypass normalization entirely, so they
    // are detected and rejected below; on other platforms nothing is verbatim.
    #[cfg(windows)]
    fn check_windows_verbatim(p: &Path) -> bool { path::windows::is_verbatim(p) }
    #[cfg(not(windows))]
    fn check_windows_verbatim(_: &Path) -> bool { false }

    // calculate root this way to handle volume-relative Windows paths correctly
    let mut root = os::getcwd().unwrap();
    let pat_root = Path::new(pattern).root_path();
    if pat_root.is_some() {
        if check_windows_verbatim(pat_root.as_ref().unwrap()) {
            // FIXME: How do we want to handle verbatim paths? I'm inclined to return nothing,
            // since we can't very well find all UNC shares with a 1-letter server name.
            return Paths {
                dir_patterns: Vec::new(),
                require_dir: false,
                options: options.clone(),
                todo: Vec::new(),
            };
        }
        root.push(pat_root.as_ref().unwrap());
    }

    // Strip the root prefix, then compile each path component of the
    // remainder into its own `Pattern`.
    let root_len = pat_root.map_or(0u, |p| p.as_vec().len());
    let dir_patterns = pattern.slice_from(cmp::min(root_len, pattern.len()))
                       .split_terminator(is_sep)
                       .map(|s| Pattern::new(s))
                       .collect::<Vec<Pattern>>();

    // A trailing separator means only directories should be yielded.
    let require_dir = pattern.chars().next_back().map(is_sep) == Some(true);

    let mut todo = Vec::new();
    if dir_patterns.len() > 0 {
        // Shouldn't happen, but we're using -1 as a special index.
        assert!(dir_patterns.len() < -1 as uint);
        // Seed the work stack with the entries under `root` that can match
        // the first pattern component.
        fill_todo(&mut todo, dir_patterns.as_slice(), 0, &root, options);
    }

    Paths {
        dir_patterns: dir_patterns,
        require_dir: require_dir,
        options: options.clone(),
        todo: todo,
    }
}
impl Iterator for Paths {
    type Item = Path;

    /// Pops work items off the `todo` stack, yielding each path that matches
    /// its pattern component and pushing that path's children (via
    /// `fill_todo`) when more components remain.
    fn next(&mut self) -> Option<Path> {
        loop {
            if self.dir_patterns.is_empty() || self.todo.is_empty() {
                return None;
            }

            let (path,idx) = self.todo.pop().unwrap();
            // idx -1: was already checked by fill_todo, maybe path was '.' or
            // '..' that we can't match here because of normalization.
            if idx == -1 as uint {
                if self.require_dir && !path.is_dir() { continue; }
                return Some(path);
            }

            let ref pattern = self.dir_patterns[idx];
            let is_recursive = pattern.is_recursive;
            let is_last = idx == self.dir_patterns.len() - 1;

            // special casing for recursive patterns when globbing
            // if it's a recursive pattern and it's not the last dir_patterns,
            // test if it matches the next non-recursive pattern,
            // if it does, then move to the pattern after the next pattern
            // otherwise accept the path based on the recursive pattern
            // and remain on the recursive pattern
            if is_recursive && !is_last {
                // the next non-recursive pattern
                let mut next = idx + 1;

                // collapse consecutive recursive patterns
                while next < self.dir_patterns.len() && self.dir_patterns[next].is_recursive {
                    next += 1;
                }

                // no non-recursive patterns follow the current one
                // so auto-accept all remaining recursive paths
                if next == self.dir_patterns.len() {
                    fill_todo(&mut self.todo, self.dir_patterns.as_slice(),
                              next - 1, &path, &self.options);
                    return Some(path);
                }

                let ref next_pattern = self.dir_patterns[next];
                let is_match = next_pattern.matches_with(match path.filename_str() {
                    // this ugly match needs to go here to avoid a borrowck error
                    None => {
                        // FIXME (#9639): How do we handle non-utf8 filenames? Ignore them for now
                        // Ideally we'd still match them against a *
                        continue;
                    }
                    Some(x) => x
                }, &self.options);

                // determine how to advance
                let (current_idx, next_idx) =
                    if is_match {
                        // accept the pattern after the next non-recursive pattern
                        (next, next + 1)
                    } else {
                        // next pattern still hasn't matched
                        // so stay on this recursive pattern
                        (next - 1, next - 1)
                    };

                if current_idx == self.dir_patterns.len() - 1 {
                    // it is not possible for a pattern to match a directory *AND* its children
                    // so we don't need to check the children
                    if !self.require_dir || path.is_dir() {
                        return Some(path);
                    }
                } else {
                    fill_todo(&mut self.todo, self.dir_patterns.as_slice(),
                              next_idx, &path, &self.options);
                }
            }
            // it's recursive and it's the last pattern
            // automatically match everything else recursively
            else if is_recursive && is_last {
                fill_todo(&mut self.todo, self.dir_patterns.as_slice(),
                          idx, &path, &self.options);
                return Some(path);
            }
            // not recursive, so match normally
            else if pattern.matches_with(match path.filename_str() {
                // this ugly match needs to go here to avoid a borrowck error
                None => {
                    // FIXME (#9639): How do we handle non-utf8 filenames? Ignore them for now
                    // Ideally we'd still match them against a *
                    continue;
                }
                Some(x) => x
            }, &self.options) {
                if idx == self.dir_patterns.len() - 1 {
                    // it is not possible for a pattern to match a directory *AND* its children
                    // so we don't need to check the children
                    if !self.require_dir || path.is_dir() {
                        return Some(path);
                    }
                } else {
                    fill_todo(&mut self.todo, self.dir_patterns.as_slice(),
                              idx + 1, &path, &self.options);
                }
            }
        }
    }
}
// Reads the entries of `path`, sorted in *descending* filename order: the
// caller pushes them onto the `todo` stack, so popping yields ascending
// (alphabetical) order. Returns `None` when the directory cannot be read.
fn list_dir_sorted(path: &Path) -> Option<Vec<Path>> {
    match fs::readdir(path) {
        Ok(mut children) => {
            // Note the reversed comparator (p2 vs p1) — deliberate, see above.
            children.sort_by(|p1, p2| p2.filename().cmp(&p1.filename()));
            Some(children.into_iter().collect())
        }
        Err(..) => None
    }
}
/// A compiled Unix shell style pattern.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct Pattern {
    // The compiled token stream; matching walks this left to right.
    tokens: Vec<PatternToken>,
    // True when the pattern contains at least one `**` component.
    is_recursive: bool,
}
// A single element of a compiled pattern.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
enum PatternToken {
    // A literal character (bracket-escaped metacharacters compile to this too).
    Char(char),
    // `?`: any single character.
    AnyChar,
    // `*`: any possibly-empty sequence, not crossing separators.
    AnySequence,
    // `**`: any sequence, including across path separators.
    AnyRecursiveSequence,
    // `[...]`: any character matching one of the specifiers.
    AnyWithin(Vec<CharSpecifier> ),
    // `[!...]`: any character matching none of the specifiers.
    AnyExcept(Vec<CharSpecifier> )
}
// One entry of a bracket expression: a literal character or an inclusive
// `a-z` style range.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
enum CharSpecifier {
    SingleChar(char),
    CharRange(char, char)
}
// Outcome of matching a (sub)pattern. `SubPatternDoesntMatch` lets a `*`
// backtrack and retry at the next position, while `EntirePatternDoesntMatch`
// aborts outright because the input string is exhausted.
#[derive(Copy, PartialEq)]
enum MatchResult {
    Match,
    SubPatternDoesntMatch,
    EntirePatternDoesntMatch
}
impl Pattern {
    /// This function compiles Unix shell style patterns: `?` matches any single
    /// character, `*` matches any (possibly empty) sequence of characters and
    /// `[...]` matches any character inside the brackets, unless the first
    /// character is `!` in which case it matches any character except those
    /// between the `!` and the `]`. Character sequences can also specify ranges
    /// of characters, as ordered by Unicode, so e.g. `[0-9]` specifies any
    /// character between 0 and 9 inclusive.
    ///
    /// A sequence of two `*` characters, `**`, acts like a single `*` except
    /// that it also matches path separators, making it useful for matching
    /// on arbitrary subdirectories. This sequence **must** form a single path
    /// component, so neither `**a` nor `b**` is valid and will instead be treated
    /// literally. A sequence of more than two consecutive `*` characters is
    /// treated literally.
    ///
    /// The metacharacters `?`, `*`, `[`, `]` can be matched by using brackets
    /// (e.g. `[?]`). When a `]` occurs immediately following `[` or `[!` then
    /// it is interpreted as being part of, rather than ending, the character
    /// set, so `]` and NOT `]` can be matched by `[]]` and `[!]]` respectively.
    /// The `-` character can be specified inside a character sequence pattern by
    /// placing it at the start or the end, e.g. `[abc-]`.
    ///
    /// When a `[` does not have a closing `]` before the end of the string then
    /// the `[` will be treated literally.
    pub fn new(pattern: &str) -> Pattern {
        let chars = pattern.chars().collect::<Vec<_>>();
        let mut tokens = Vec::new();
        let mut is_recursive = false;
        let mut i = 0;

        while i < chars.len() {
            match chars[i] {
                '?' => {
                    tokens.push(AnyChar);
                    i += 1;
                }
                '*' => {
                    let old = i;

                    while i < chars.len() && chars[i] == '*' {
                        i += 1;
                    }

                    let count = i - old;

                    if count > 2 {
                        for _ in range(0u, count) {
                            tokens.push(Char('*'));
                        }
                    } else if count == 2 {
                        // ** can only be an entire path component
                        // i.e. a/**/b is valid, but a**/b or a/**b is not
                        // invalid matches are treated literally
                        let is_valid =
                            // begins with '/' or is the beginning of the pattern
                            if i == 2 || chars[i - count - 1] == '/' {
                                // it ends in a '/'
                                if i < chars.len() && chars[i] == '/' {
                                    i += 1;
                                    true
                                // or the pattern ends here
                                // this enables the existing globbing mechanism
                                } else if i == chars.len() {
                                    true
                                } else {
                                    false
                                }
                            } else {
                                false
                            };

                        let tokens_len = tokens.len();

                        if is_valid {
                            // collapse consecutive AnyRecursiveSequence to a single one
                            if !(tokens_len > 1 && tokens[tokens_len - 1] == AnyRecursiveSequence) {
                                is_recursive = true;
                                tokens.push(AnyRecursiveSequence);
                            }
                        } else {
                            // treat invalid sequences literally
                            tokens.push(Char('*'));
                            tokens.push(Char('*'));
                        }
                    } else {
                        tokens.push(AnySequence);
                    }
                }
                '[' => {
                    // BUGFIX: the length checks are written as `i + 4 <= len`
                    // rather than `i <= len - 4` because `len` is unsigned;
                    // the subtraction underflowed for short patterns such as
                    // `"["` or `"[!"`, producing a huge bound and then an
                    // out-of-bounds index/slice panic below.
                    if i + 4 <= chars.len() && chars[i + 1] == '!' {
                        // `[!` negated set: the first candidate `]` is at i+3 so
                        // that `[!]]` keeps its leading `]` inside the set.
                        match chars.slice_from(i + 3).position_elem(&']') {
                            None => (),
                            Some(j) => {
                                let chars = chars.slice(i + 2, i + 3 + j);
                                let cs = parse_char_specifiers(chars);
                                tokens.push(AnyExcept(cs));
                                i += j + 4;
                                continue;
                            }
                        }
                    }
                    else if i + 3 <= chars.len() && chars[i + 1] != '!' {
                        // Plain `[` set: the first candidate `]` is at i+2 so
                        // that `[]]` keeps its leading `]` inside the set.
                        match chars.slice_from(i + 2).position_elem(&']') {
                            None => (),
                            Some(j) => {
                                let cs = parse_char_specifiers(chars.slice(i + 1, i + 2 + j));
                                tokens.push(AnyWithin(cs));
                                i += j + 3;
                                continue;
                            }
                        }
                    }

                    // if we get here then this is not a valid range pattern
                    tokens.push(Char('['));
                    i += 1;
                }
                c => {
                    tokens.push(Char(c));
                    i += 1;
                }
            }
        }

        Pattern { tokens: tokens, is_recursive: is_recursive }
    }

    /// Escape metacharacters within the given string by surrounding them in
    /// brackets. The resulting string will, when compiled into a `Pattern`,
    /// match the input string and nothing else.
    pub fn escape(s: &str) -> String {
        let mut escaped = String::new();
        for c in s.chars() {
            match c {
                // note that ! does not need escaping because it is only special inside brackets
                '?' | '*' | '[' | ']' => {
                    escaped.push('[');
                    escaped.push(c);
                    escaped.push(']');
                }
                c => {
                    escaped.push(c);
                }
            }
        }
        escaped
    }

    /// Return if the given `str` matches this `Pattern` using the default
    /// match options (i.e. `MatchOptions::new()`).
    ///
    /// # Example
    ///
    /// ```rust
    /// use glob::Pattern;
    ///
    /// assert!(Pattern::new("c?t").matches("cat"));
    /// assert!(Pattern::new("k[!e]tteh").matches("kitteh"));
    /// assert!(Pattern::new("d*g").matches("doog"));
    /// ```
    pub fn matches(&self, str: &str) -> bool {
        self.matches_with(str, &MatchOptions::new())
    }

    /// Return if the given `Path`, when converted to a `str`, matches this `Pattern`
    /// using the default match options (i.e. `MatchOptions::new()`).
    pub fn matches_path(&self, path: &Path) -> bool {
        // FIXME (#9639): This needs to handle non-utf8 paths
        path.as_str().map_or(false, |s| {
            self.matches(s)
        })
    }

    /// Return if the given `str` matches this `Pattern` using the specified match options.
    pub fn matches_with(&self, str: &str, options: &MatchOptions) -> bool {
        self.matches_from(None, str, 0, options) == Match
    }

    /// Return if the given `Path`, when converted to a `str`, matches this `Pattern`
    /// using the specified match options.
    pub fn matches_path_with(&self, path: &Path, options: &MatchOptions) -> bool {
        // FIXME (#9639): This needs to handle non-utf8 paths
        path.as_str().map_or(false, |s| {
            self.matches_with(s, options)
        })
    }

    // Recursive matcher: tries to match `file` against the tokens starting at
    // index `i`, tracking the previously consumed character so that
    // `require_literal_leading_dot` can detect component-leading dots.
    fn matches_from(&self,
                    prev_char: Option<char>,
                    mut file: &str,
                    i: uint,
                    options: &MatchOptions) -> MatchResult {
        let prev_char = Cell::new(prev_char);

        // A character must be matched by a literal `Char` token when it is a
        // separator under `require_literal_separator`, or a dot that starts a
        // component under `require_literal_leading_dot` (an absent previous
        // character counts as a separator: start of the string).
        let require_literal = |c| {
            (options.require_literal_separator && is_sep(c)) ||
            (options.require_literal_leading_dot && c == '.'
             && is_sep(prev_char.get().unwrap_or('/')))
        };

        for (ti, token) in self.tokens.slice_from(i).iter().enumerate() {
            match *token {
                AnySequence | AnyRecursiveSequence => {
                    // Greedily try the rest of the pattern at every suffix of
                    // `file`, consuming one character per failed attempt.
                    loop {
                        match self.matches_from(prev_char.get(), file, i + ti + 1, options) {
                            SubPatternDoesntMatch => (), // keep trying
                            m => return m,
                        }

                        let (c, next) = match file.slice_shift_char() {
                            None => return EntirePatternDoesntMatch,
                            Some(pair) => pair
                        };

                        // Only plain `*` refuses literal-required characters;
                        // `**` may swallow separators.
                        if let AnySequence = *token {
                            if require_literal(c) {
                                return SubPatternDoesntMatch;
                            }
                        }

                        prev_char.set(Some(c));
                        file = next;
                    }
                }
                _ => {
                    let (c, next) = match file.slice_shift_char() {
                        None => return EntirePatternDoesntMatch,
                        Some(pair) => pair
                    };

                    let matches = match *token {
                        AnyChar => {
                            !require_literal(c)
                        }
                        AnyWithin(ref specifiers) => {
                            !require_literal(c) &&
                            in_char_specifiers(specifiers.as_slice(),
                                               c,
                                               options)
                        }
                        AnyExcept(ref specifiers) => {
                            !require_literal(c) &&
                            !in_char_specifiers(specifiers.as_slice(),
                                                c,
                                                options)
                        }
                        Char(c2) => {
                            chars_eq(c, c2, options.case_sensitive)
                        }
                        AnySequence | AnyRecursiveSequence => {
                            // Handled by the arm above; cannot occur here.
                            unreachable!()
                        }
                    };

                    if !matches {
                        return SubPatternDoesntMatch;
                    }

                    prev_char.set(Some(c));
                    file = next;
                }
            }
        }

        // All tokens consumed: a match only if the input is also exhausted.
        if file.is_empty() {
            Match
        } else {
            SubPatternDoesntMatch
        }
    }
}
// Fills `todo` with paths under `path` to be matched by `patterns[idx]`,
// special-casing patterns to match `.` and `..`, and avoiding `readdir()`
// calls when there are no metacharacters in the pattern.
fn fill_todo(todo: &mut Vec<(Path, uint)>, patterns: &[Pattern], idx: uint, path: &Path,
             options: &MatchOptions) {
    // convert a pattern that's just many Char(_) to a string
    fn pattern_as_str(pattern: &Pattern) -> Option<String> {
        let mut s = String::new();
        for token in pattern.tokens.iter() {
            match *token {
                Char(c) => s.push(c),
                _ => return None
            }
        }
        return Some(s);
    }

    // Queue `next_path`: when it survived the last pattern component, push it
    // with the `-1 as uint` sentinel so the iterator yields it without
    // re-matching; otherwise recurse onto the next component.
    let add = |todo: &mut Vec<_>, next_path: Path| {
        if idx + 1 == patterns.len() {
            // We know it's good, so don't make the iterator match this path
            // against the pattern again. In particular, it can't match
            // . or .. globs since these never show up as path components.
            todo.push((next_path, -1 as uint));
        } else {
            fill_todo(todo, patterns, idx + 1, &next_path, options);
        }
    };

    let pattern = &patterns[idx];

    match pattern_as_str(pattern) {
        Some(s) => {
            // This pattern component doesn't have any metacharacters, so we
            // don't need to read the current directory to know where to
            // continue. So instead of passing control back to the iterator,
            // we can just check for that one entry and potentially recurse
            // right away.
            let special = "." == s.as_slice() || ".." == s.as_slice();
            let next_path = path.join(s.as_slice());
            if (special && path.is_dir()) || (!special && next_path.exists()) {
                add(todo, next_path);
            }
        },
        None => {
            match list_dir_sorted(path) {
                Some(entries) => {
                    todo.extend(entries.into_iter().map(|x|(x, idx)));

                    // Matching the special directory entries . and .. that refer to
                    // the current and parent directory respectively requires that
                    // the pattern has a leading dot, even if the `MatchOptions` field
                    // `require_literal_leading_dot` is not set.
                    if pattern.tokens.len() > 0 && pattern.tokens[0] == Char('.') {
                        for &special in [".", ".."].iter() {
                            if pattern.matches_with(special, options) {
                                add(todo, path.join(special));
                            }
                        }
                    }
                }
                // Unreadable directory: silently contribute nothing.
                None => {}
            }
        }
    }
}
// Parses the interior of a bracket expression into specifiers: any `x-y`
// triple (a `-` with a character on each side) becomes a `CharRange`,
// everything else — including a leading or trailing `-` — a `SingleChar`.
fn parse_char_specifiers(s: &[char]) -> Vec<CharSpecifier> {
    let mut specifiers = Vec::new();
    let len = s.len();
    let mut idx = 0;
    while idx < len {
        // A range needs three characters remaining and `-` in the middle.
        let is_range = idx + 3 <= len && s[idx + 1] == '-';
        if is_range {
            specifiers.push(CharRange(s[idx], s[idx + 2]));
            idx += 3;
        } else {
            specifiers.push(SingleChar(s[idx]));
            idx += 1;
        }
    }
    specifiers
}
// Returns true when `c` matches any of the given specifiers, honouring
// `options.case_sensitive` for both single characters and ASCII letter ranges.
fn in_char_specifiers(specifiers: &[CharSpecifier], c: char, options: &MatchOptions) -> bool {
    for &specifier in specifiers.iter() {
        match specifier {
            SingleChar(sc) => {
                if chars_eq(c, sc, options.case_sensitive) {
                    return true;
                }
            }
            CharRange(start, end) => {
                // FIXME: work with non-ascii chars properly (issue #1347)
                if !options.case_sensitive && c.is_ascii() && start.is_ascii() && end.is_ascii() {
                    let start = start.to_ascii_lowercase();
                    let end = end.to_ascii_lowercase();

                    let start_up = start.to_uppercase();
                    let end_up = end.to_uppercase();

                    // only allow case insensitive matching when
                    // both start and end are within a-z or A-Z
                    // (a letter's lowercase differs from its uppercase form;
                    // digits and punctuation fall through to the exact check)
                    if start != start_up && end != end_up {
                        let c = c.to_ascii_lowercase();
                        if c >= start && c <= end {
                            return true;
                        }
                    }
                }

                // Exact (case-sensitive) range comparison; also the fallback
                // when the case-insensitive branch above declined to match.
                if c >= start && c <= end {
                    return true;
                }
            }
        }
    }

    false
}
/// A helper function to determine if two chars are (possibly case-insensitively) equal.
fn chars_eq(a: char, b: char, case_sensitive: bool) -> bool {
    if cfg!(windows) && path::windows::is_sep(a) && path::windows::is_sep(b) {
        // On Windows any two separator characters compare equal.
        true
    } else if !case_sensitive && a.is_ascii() && b.is_ascii() {
        // FIXME: work with non-ascii chars properly (issue #9084)
        a.to_ascii_lowercase() == b.to_ascii_lowercase()
    } else {
        a == b
    }
}
/// Configuration options to modify the behaviour of `Pattern::matches_with(..)`
// NOTE(review): the derived `Default` sets every field to `false`, which
// differs from `MatchOptions::new()` (`case_sensitive: true`) — confirm
// which one callers should use before relying on `Default`.
#[allow(missing_copy_implementations)]
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct MatchOptions {
    /// Whether or not patterns should be matched in a case-sensitive manner. This
    /// currently only considers upper/lower case relationships between ASCII characters,
    /// but in future this might be extended to work with Unicode.
    pub case_sensitive: bool,

    /// If this is true then path-component separator characters (e.g. `/` on Posix)
    /// must be matched by a literal `/`, rather than by `*` or `?` or `[...]`
    pub require_literal_separator: bool,

    /// If this is true then paths that contain components that start with a `.` will
    /// not match unless the `.` appears literally in the pattern: `*`, `?` or `[...]`
    /// will not match. This is useful because such files are conventionally considered
    /// hidden on Unix systems and it might be desirable to skip them when listing files.
    pub require_literal_leading_dot: bool
}
impl MatchOptions {
    /// Constructs a new `MatchOptions` with default field values. This is used
    /// when calling functions that do not take an explicit `MatchOptions` parameter.
    ///
    /// This function always returns this value:
    ///
    /// ```rust,ignore
    /// MatchOptions {
    ///     case_sensitive: true,
    ///     require_literal_separator: false,
    ///     require_literal_leading_dot: false
    /// }
    /// ```
    pub fn new() -> MatchOptions {
        // Note: intentionally case-sensitive by default, unlike the derived
        // `Default` implementation on the struct (which is all-false).
        MatchOptions {
            case_sensitive: true,
            require_literal_separator: false,
            require_literal_leading_dot: false
        }
    }
}
#[cfg(test)]
mod test {
use std::os;
use super::{glob, Pattern, MatchOptions};
#[test]
fn test_absolute_pattern() {
// assume that the filesystem is not empty!
assert!(glob("/*").next().is_some());
assert!(glob("//").next().is_some());
// check windows absolute paths with host/device components
let root_with_device = os::getcwd().unwrap().root_path().unwrap().join("*");
// FIXME (#9639): This needs to handle non-utf8 paths
assert!(glob(root_with_device.as_str().unwrap()).next().is_some());
}
#[test]
fn test_wildcards() {
assert!(Pattern::new("a*b").matches("a_b"));
assert!(Pattern::new("a**b").matches("a**b"));
assert!(Pattern::new("a*b*c").matches("abc"));
assert!(!Pattern::new("a*b*c").matches("abcd"));
assert!(Pattern::new("a*b*c").matches("a_b_c"));
assert!(Pattern::new("a*b*c").matches("a___b___c"));
assert!(Pattern::new("abc*abc*abc").matches("abcabcabcabcabcabcabc"));
assert!(!Pattern::new("abc*abc*abc").matches("abcabcabcabcabcabcabca"));
assert!(Pattern::new("a*a*a*a*a*a*a*a*a").matches("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"));
assert!(Pattern::new("a*b[xyz]c*d").matches("abxcdbxcddd"));
}
#[test]
fn test_recursive_wildcards() {
let pat = Pattern::new("some/**/needle.txt");
assert!(pat.matches("some/needle.txt"));
assert!(pat.matches("some/one/needle.txt"));
assert!(pat.matches("some/one/two/needle.txt"));
assert!(pat.matches("some/other/needle.txt"));
assert!(!pat.matches("some/other/notthis.txt"));
// a single ** should be valid, for globs
assert!(Pattern::new("**").is_recursive);
// collapse consecutive wildcards
let pat = Pattern::new("some/**/**/needle.txt");
assert!(pat.matches("some/needle.txt"));
assert!(pat.matches("some/one/needle.txt"));
assert!(pat.matches("some/one/two/needle.txt"));
assert!(pat.matches("some/other/needle.txt"));
assert!(!pat.matches("some/other/notthis.txt"));
// recursive patterns should form a single
// path component with nothing else in them
// otherwise they're treated literally
let pat = Pattern::new("some/**b");
assert!(pat.matches("some/**b"));
assert!(!pat.matches("some/lolb"));
let pat = Pattern::new("some/b**");
assert!(pat.matches("some/b**"));
assert!(!pat.matches("some/bob"));
assert!(!pat.matches("some/bob/lol"));
// ** can begin the pattern
let pat = Pattern::new("**/test");
assert!(pat.matches("one/two/test"));
assert!(pat.matches("one/test"));
assert!(pat.matches("test"));
// /** can begin the pattern
let pat = Pattern::new("/**/test");
assert!(pat.matches("/one/two/test"));
assert!(pat.matches("/one/test"));
assert!(pat.matches("/test"));
assert!(!pat.matches("/one/notthis"));
assert!(!pat.matches("/notthis"));
// more than 2 consecutive wildcards and they're all treated literally
assert!(Pattern::new("a***b").matches("a***b"));
}
#[test]
fn test_lots_of_files() {
// this is a good test because it touches lots of differently named files
glob("/*/*/*/*").skip(10000).next();
}
#[test]
fn test_range_pattern() {
let pat = Pattern::new("a[0-9]b");
for i in range(0u, 10) {
assert!(pat.matches(format!("a{}b", i).as_slice()));
}
assert!(!pat.matches("a_b"));
let pat = Pattern::new("a[!0-9]b");
for i in range(0u, 10) {
assert!(!pat.matches(format!("a{}b", i).as_slice()));
}
assert!(pat.matches("a_b"));
let pats = ["[a-z123]", "[1a-z23]", "[123a-z]"];
for &p in pats.iter() {
let pat = Pattern::new(p);
for c in "abcdefghijklmnopqrstuvwxyz".chars() {
assert!(pat.matches(c.to_string().as_slice()));
}
for c in "ABCDEFGHIJKLMNOPQRSTUVWXYZ".chars() {
let options = MatchOptions {case_sensitive: false, .. MatchOptions::new()};
assert!(pat.matches_with(c.to_string().as_slice(), &options));
}
assert!(pat.matches("1"));
assert!(pat.matches("2"));
assert!(pat.matches("3"));
}
let pats = ["[abc-]", "[-abc]", "[a-c-]"];
for &p in pats.iter() {
let pat = Pattern::new(p);
assert!(pat.matches("a"));
assert!(pat.matches("b"));
assert!(pat.matches("c"));
assert!(pat.matches("-"));
assert!(!pat.matches("d"));
}
let pat = Pattern::new("[2-1]");
assert!(!pat.matches("1"));
assert!(!pat.matches("2"));
assert!(Pattern::new("[-]").matches("-"));
assert!(!Pattern::new("[!-]").matches("-"));
}
#[test]
fn test_unclosed_bracket() {
// unclosed `[` should be treated literally
assert!(Pattern::new("abc[def").matches("abc[def"));
assert!(Pattern::new("abc[!def").matches("abc[!def"));
assert!(Pattern::new("abc[").matches("abc["));
assert!(Pattern::new("abc[!").matches("abc[!"));
assert!(Pattern::new("abc[d").matches("abc[d"));
assert!(Pattern::new("abc[!d").matches("abc[!d"));
assert!(Pattern::new("abc[]").matches("abc[]"));
assert!(Pattern::new("abc[!]").matches("abc[!]"));
}
#[test]
fn test_pattern_matches() {
let txt_pat = Pattern::new("*hello.txt");
assert!(txt_pat.matches("hello.txt"));
assert!(txt_pat.matches("gareth_says_hello.txt"));
assert!(txt_pat.matches("some/path/to/hello.txt"));
assert!(txt_pat.matches("some\\path\\to\\hello.txt"));
assert!(txt_pat.matches("/an/absolute/path/to/hello.txt"));
assert!(!txt_pat.matches("hello.txt-and-then-some"));
assert!(!txt_pat.matches("goodbye.txt"));
let dir_pat = Pattern::new("*some/path/to/hello.txt");
assert!(dir_pat.matches("some/path/to/hello.txt"));
assert!(dir_pat.matches("a/bigger/some/path/to/hello.txt"));
assert!(!dir_pat.matches("some/path/to/hello.txt-and-then-some"));
assert!(!dir_pat.matches("some/other/path/to/hello.txt"));
}
#[test]
fn test_pattern_escape() {
let s = "_[_]_?_*_!_";
assert_eq!(Pattern::escape(s), "_[[]_[]]_[?]_[*]_!_".to_string());
assert!(Pattern::new(Pattern::escape(s).as_slice()).matches(s));
}
#[test]
fn test_pattern_matches_case_insensitive() {
let pat = Pattern::new("aBcDeFg");
let options = MatchOptions {
case_sensitive: false,
require_literal_separator: false,
require_literal_leading_dot: false
};
assert!(pat.matches_with("aBcDeFg", &options));
assert!(pat.matches_with("abcdefg", &options));
assert!(pat.matches_with("ABCDEFG", &options));
assert!(pat.matches_with("AbCdEfG", &options));
}
#[test]
fn test_pattern_matches_case_insensitive_range() {
    let pat_within = Pattern::new("[a]");
    let pat_except = Pattern::new("[!a]");
    let insensitive = MatchOptions {
        case_sensitive: false,
        .. MatchOptions::new()
    };
    // the default options are already case-sensitive
    let sensitive = MatchOptions::new();
    // `[a]` accepts the opposite case only when matching insensitively
    assert!(pat_within.matches_with("a", &insensitive));
    assert!(pat_within.matches_with("A", &insensitive));
    assert!(!pat_within.matches_with("A", &sensitive));
    // `[!a]` mirrors that behaviour exactly
    assert!(!pat_except.matches_with("a", &insensitive));
    assert!(!pat_except.matches_with("A", &insensitive));
    assert!(pat_except.matches_with("A", &sensitive));
}
#[test]
fn test_pattern_matches_require_literal_separator() {
    let require_literal = MatchOptions {
        require_literal_separator: true,
        .. MatchOptions::new()
    };
    // the default options do not require a literal separator
    let no_require_literal = MatchOptions::new();
    // with the option set, only a literal `/` in the pattern may match `/`
    assert!(Pattern::new("abc/def").matches_with("abc/def", &require_literal));
    assert!(!Pattern::new("abc?def").matches_with("abc/def", &require_literal));
    assert!(!Pattern::new("abc*def").matches_with("abc/def", &require_literal));
    assert!(!Pattern::new("abc[/]def").matches_with("abc/def", &require_literal));
    // without it, wildcards and ranges may consume the separator too
    assert!(Pattern::new("abc/def").matches_with("abc/def", &no_require_literal));
    assert!(Pattern::new("abc?def").matches_with("abc/def", &no_require_literal));
    assert!(Pattern::new("abc*def").matches_with("abc/def", &no_require_literal));
    assert!(Pattern::new("abc[/]def").matches_with("abc/def", &no_require_literal));
}
#[test]
fn test_pattern_matches_require_literal_leading_dot() {
    let require = MatchOptions {
        require_literal_leading_dot: true,
        .. MatchOptions::new()
    };
    // the default options do not restrict leading dots
    let no_require = MatchOptions::new();
    // helper: does `pattern` match `path` under the given options?
    let check = |pattern: &str, path: &str, options: &MatchOptions| {
        Pattern::new(pattern).matches_with(path, options)
    };
    // a bare `*` may not swallow a leading dot
    assert!(check("*.txt", ".hello.txt", &no_require));
    assert!(!check("*.txt", ".hello.txt", &require));
    // but a literal leading `.` in the pattern is always fine
    assert!(check(".*.*", ".hello.txt", &no_require));
    assert!(check(".*.*", ".hello.txt", &require));
    // the rule applies per path component, not just at the pattern start
    assert!(check("aaa/bbb/*", "aaa/bbb/.ccc", &no_require));
    assert!(!check("aaa/bbb/*", "aaa/bbb/.ccc", &require));
    // dots elsewhere in the component are unaffected
    assert!(check("aaa/bbb/*", "aaa/bbb/c.c.c.", &no_require));
    assert!(check("aaa/bbb/*", "aaa/bbb/c.c.c.", &require));
    assert!(check("aaa/bbb/.*", "aaa/bbb/.ccc", &no_require));
    assert!(check("aaa/bbb/.*", "aaa/bbb/.ccc", &require));
    // `?` and `[...]` are restricted just like `*`
    assert!(check("aaa/?bbb", "aaa/.bbb", &no_require));
    assert!(!check("aaa/?bbb", "aaa/.bbb", &require));
    assert!(check("aaa/[.]bbb", "aaa/.bbb", &no_require));
    assert!(!check("aaa/[.]bbb", "aaa/.bbb", &require));
}
#[test]
fn test_matches_path() {
    // on windows, Path::new("a/b").as_str() yields "a\\b"; matching it against
    // the pattern "a/b" verifies that / and \ are treated as equivalent there
    let path = Path::new("a/b");
    assert!(Pattern::new("a/b").matches_path(&path));
}
}
Closure changes in rustc
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Support for matching file paths against Unix shell style patterns.
//!
//! The `glob` and `glob_with` functions, in concert with the `Paths`
//! type, allow querying the filesystem for all files that match a particular
//! pattern - just like the libc `glob` function (for an example see the `glob`
//! documentation). The methods on the `Pattern` type provide functionality
//! for checking if individual paths match a particular pattern - in a similar
//! manner to the libc `fnmatch` function.
//!
//! For consistency across platforms, and for Windows support, this module
//! is implemented entirely in Rust rather than deferring to the libc
//! `glob`/`fnmatch` functions.
#![feature(associated_types)]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/glob/")]
use std::ascii::AsciiExt;
use std::cell::Cell;
use std::{cmp, os, path};
use std::io::fs::{self, PathExtensions};
use std::path::is_sep;
use std::string::String;
use PatternToken::{Char, AnyChar, AnySequence, AnyRecursiveSequence, AnyWithin, AnyExcept};
use CharSpecifier::{SingleChar, CharRange};
use MatchResult::{Match, SubPatternDoesntMatch, EntirePatternDoesntMatch};
/// An iterator that yields Paths from the filesystem that match a particular
/// pattern - see the `glob` function for more details.
pub struct Paths {
    // one compiled `Pattern` per path component of the glob expression
    dir_patterns: Vec<Pattern>,
    // true when the glob ended in a separator, i.e. only directories match
    require_dir: bool,
    // match options forwarded to `Pattern::matches_with`
    options: MatchOptions,
    // work stack of (path, pattern index) pairs still to be examined;
    // the index `-1 as uint` marks paths already validated by `fill_todo`
    todo: Vec<(Path,uint)>,
}
/// Return an iterator that produces all the Paths that match the given pattern,
/// which may be absolute or relative to the current working directory.
///
/// This method uses the default match options and is equivalent to calling
/// `glob_with(pattern, MatchOptions::new())`. Use `glob_with` directly if you
/// want to use non-default match options.
///
/// # Example
///
/// Consider a directory `/media/pictures` containing only the files `kittens.jpg`,
/// `puppies.jpg` and `hamsters.gif`:
///
/// ```rust
/// use glob::glob;
///
/// for path in glob("/media/pictures/*.jpg") {
///     println!("{}", path.display());
/// }
/// ```
///
/// The above code will print:
///
/// ```ignore
/// /media/pictures/kittens.jpg
/// /media/pictures/puppies.jpg
/// ```
///
/// # Panics
///
/// Panics if the current working directory cannot be determined, because
/// `glob_with` resolves relative patterns via `os::getcwd().unwrap()`.
pub fn glob(pattern: &str) -> Paths {
    glob_with(pattern, &MatchOptions::new())
}
/// Return an iterator that produces all the Paths that match the given pattern,
/// which may be absolute or relative to the current working directory.
///
/// This function accepts Unix shell style patterns as described by `Pattern::new(..)`.
/// The options given are passed through unchanged to `Pattern::matches_with(..)` with
/// the exception that `require_literal_separator` is always set to `true` regardless of the
/// value passed to this function.
///
/// Paths are yielded in alphabetical order, as absolute paths.
pub fn glob_with(pattern: &str, options: &MatchOptions) -> Paths {
    // verbatim (`\\?\...`) windows paths bypass normal path parsing, so they
    // are detected and rejected below
    #[cfg(windows)]
    fn check_windows_verbatim(p: &Path) -> bool { path::windows::is_verbatim(p) }
    #[cfg(not(windows))]
    fn check_windows_verbatim(_: &Path) -> bool { false }
    // calculate root this way to handle volume-relative Windows paths correctly
    let mut root = os::getcwd().unwrap();
    let pat_root = Path::new(pattern).root_path();
    if pat_root.is_some() {
        if check_windows_verbatim(pat_root.as_ref().unwrap()) {
            // FIXME: How do we want to handle verbatim paths? I'm inclined to return nothing,
            // since we can't very well find all UNC shares with a 1-letter server name.
            return Paths {
                dir_patterns: Vec::new(),
                require_dir: false,
                options: options.clone(),
                todo: Vec::new(),
            };
        }
        root.push(pat_root.as_ref().unwrap());
    }
    // strip the root prefix from the pattern, then compile one Pattern
    // per remaining path component
    let root_len = pat_root.map_or(0u, |p| p.as_vec().len());
    let dir_patterns = pattern.slice_from(cmp::min(root_len, pattern.len()))
                       .split_terminator(is_sep)
                       .map(|s| Pattern::new(s))
                       .collect::<Vec<Pattern>>();
    // a trailing separator means only directories should be yielded
    let require_dir = pattern.chars().next_back().map(is_sep) == Some(true);
    let mut todo = Vec::new();
    if dir_patterns.len() > 0 {
        // Shouldn't happen, but we're using -1 as a special index.
        assert!(dir_patterns.len() < -1 as uint);
        // seed the work stack with candidates for the first component
        fill_todo(&mut todo, dir_patterns.as_slice(), 0, &root, options);
    }
    Paths {
        dir_patterns: dir_patterns,
        require_dir: require_dir,
        options: options.clone(),
        todo: todo,
    }
}
impl Iterator for Paths {
    type Item = Path;
    /// Pops candidate (path, pattern-index) pairs off the `todo` stack and
    /// yields each path that matches the remaining pattern components,
    /// pushing newly discovered children back onto the stack via `fill_todo`.
    fn next(&mut self) -> Option<Path> {
        loop {
            if self.dir_patterns.is_empty() || self.todo.is_empty() {
                return None;
            }
            let (path,idx) = self.todo.pop().unwrap();
            // idx -1: was already checked by fill_todo, maybe path was '.' or
            // '..' that we can't match here because of normalization.
            if idx == -1 as uint {
                if self.require_dir && !path.is_dir() { continue; }
                return Some(path);
            }
            let ref pattern = self.dir_patterns[idx];
            let is_recursive = pattern.is_recursive;
            let is_last = idx == self.dir_patterns.len() - 1;
            // special casing for recursive patterns when globbing
            // if it's a recursive pattern and it's not the last dir_patterns,
            // test if it matches the next non-recursive pattern,
            // if it does, then move to the pattern after the next pattern
            // otherwise accept the path based on the recursive pattern
            // and remain on the recursive pattern
            if is_recursive && !is_last {
                // the next non-recursive pattern
                let mut next = idx + 1;
                // collapse consecutive recursive patterns
                while next < self.dir_patterns.len() && self.dir_patterns[next].is_recursive {
                    next += 1;
                }
                // no non-recursive patterns follow the current one
                // so auto-accept all remaining recursive paths
                if next == self.dir_patterns.len() {
                    fill_todo(&mut self.todo, self.dir_patterns.as_slice(),
                              next - 1, &path, &self.options);
                    return Some(path);
                }
                let ref next_pattern = self.dir_patterns[next];
                let is_match = next_pattern.matches_with(match path.filename_str() {
                    // this ugly match needs to go here to avoid a borrowck error
                    None => {
                        // FIXME (#9639): How do we handle non-utf8 filenames? Ignore them for now
                        // Ideally we'd still match them against a *
                        continue;
                    }
                    Some(x) => x
                }, &self.options);
                // determine how to advance
                let (current_idx, next_idx) =
                    if is_match {
                        // accept the pattern after the next non-recursive pattern
                        (next, next + 1)
                    } else {
                        // next pattern still hasn't matched
                        // so stay on this recursive pattern
                        (next - 1, next - 1)
                    };
                if current_idx == self.dir_patterns.len() - 1 {
                    // it is not possible for a pattern to match a directory *AND* its children
                    // so we don't need to check the children
                    if !self.require_dir || path.is_dir() {
                        return Some(path);
                    }
                } else {
                    fill_todo(&mut self.todo, self.dir_patterns.as_slice(),
                              next_idx, &path, &self.options);
                }
            }
            // it's recursive and it's the last pattern
            // automatically match everything else recursively
            else if is_recursive && is_last {
                fill_todo(&mut self.todo, self.dir_patterns.as_slice(),
                          idx, &path, &self.options);
                return Some(path);
            }
            // not recursive, so match normally
            else if pattern.matches_with(match path.filename_str() {
                // this ugly match needs to go here to avoid a borrowck error
                None => {
                    // FIXME (#9639): How do we handle non-utf8 filenames? Ignore them for now
                    // Ideally we'd still match them against a *
                    continue;
                }
                Some(x) => x
            }, &self.options) {
                if idx == self.dir_patterns.len() - 1 {
                    // it is not possible for a pattern to match a directory *AND* its children
                    // so we don't need to check the children
                    if !self.require_dir || path.is_dir() {
                        return Some(path);
                    }
                } else {
                    fill_todo(&mut self.todo, self.dir_patterns.as_slice(),
                              idx + 1, &path, &self.options);
                }
            }
        }
    }
}
/// Read the entries of `path`, sorted by filename in *descending* order so
/// that popping them off the LIFO `todo` stack yields ascending order.
/// Returns `None` when the directory cannot be read.
fn list_dir_sorted(path: &Path) -> Option<Vec<Path>> {
    if let Ok(mut entries) = fs::readdir(path) {
        // reverse comparison: see note above about stack ordering
        entries.sort_by(|p1, p2| p2.filename().cmp(&p1.filename()));
        Some(entries)
    } else {
        None
    }
}
/// A compiled Unix shell style pattern.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct Pattern {
    // the compiled token stream, in pattern order
    tokens: Vec<PatternToken>,
    // true when compilation produced at least one AnyRecursiveSequence (`**`)
    is_recursive: bool,
}
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
enum PatternToken {
    /// a literal character (compared via `chars_eq`, possibly case-insensitively)
    Char(char),
    /// `?`: any single character
    AnyChar,
    /// `*`: any (possibly empty) sequence, stopped by literal-only chars
    AnySequence,
    /// `**`: any sequence, including across path separators
    AnyRecursiveSequence,
    /// `[...]`: any character contained in the specifiers
    AnyWithin(Vec<CharSpecifier> ),
    /// `[!...]`: any character NOT contained in the specifiers
    AnyExcept(Vec<CharSpecifier> )
}
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
enum CharSpecifier {
    /// a single literal character inside `[...]`
    SingleChar(char),
    /// an inclusive `a-z` style range inside `[...]`
    CharRange(char, char)
}
#[derive(Copy, PartialEq)]
enum MatchResult {
    /// the whole pattern matched the whole input
    Match,
    /// this attempt failed, but backtracking at an earlier `*` may still succeed
    SubPatternDoesntMatch,
    /// the input was exhausted; no amount of backtracking can help
    EntirePatternDoesntMatch
}
impl Pattern {
    /// This function compiles Unix shell style patterns: `?` matches any single
    /// character, `*` matches any (possibly empty) sequence of characters and
    /// `[...]` matches any character inside the brackets, unless the first
    /// character is `!` in which case it matches any character except those
    /// between the `!` and the `]`. Character sequences can also specify ranges
    /// of characters, as ordered by Unicode, so e.g. `[0-9]` specifies any
    /// character between 0 and 9 inclusive.
    ///
    /// A sequence of two `*` characters, `**`, acts like a single `*` except
    /// that it also matches path separators, making it useful for matching
    /// on arbitrary subdirectories. This sequence **must** form a single path
    /// component, so neither `**a` nor `b**` is valid and will instead be treated
    /// literally. A sequence of more than two consecutive `*` characters is
    /// treated literally.
    ///
    /// The metacharacters `?`, `*`, `[`, `]` can be matched by using brackets
    /// (e.g. `[?]`). When a `]` occurs immediately following `[` or `[!` then
    /// it is interpreted as being part of, rather than ending, the character
    /// set, so `]` and NOT `]` can be matched by `[]]` and `[!]]` respectively.
    /// The `-` character can be specified inside a character sequence pattern by
    /// placing it at the start or the end, e.g. `[abc-]`.
    ///
    /// When a `[` does not have a closing `]` before the end of the string then
    /// the `[` will be treated literally.
    pub fn new(pattern: &str) -> Pattern {
        let chars = pattern.chars().collect::<Vec<_>>();
        let mut tokens = Vec::new();
        let mut is_recursive = false;
        let mut i = 0;
        while i < chars.len() {
            match chars[i] {
                '?' => {
                    tokens.push(AnyChar);
                    i += 1;
                }
                '*' => {
                    // consume the entire run of consecutive '*'s at once
                    let old = i;
                    while i < chars.len() && chars[i] == '*' {
                        i += 1;
                    }
                    let count = i - old;
                    if count > 2 {
                        // three or more stars are always literal
                        for _ in range(0u, count) {
                            tokens.push(Char('*'));
                        }
                    } else if count == 2 {
                        // ** can only be an entire path component
                        // i.e. a/**/b is valid, but a**/b or a/**b is not
                        // invalid matches are treated literally
                        let is_valid =
                            // begins with '/' or is the beginning of the pattern
                            if i == 2 || chars[i - count - 1] == '/' {
                                // it ends in a '/'
                                if i < chars.len() && chars[i] == '/' {
                                    i += 1;
                                    true
                                // or the pattern ends here
                                // this enables the existing globbing mechanism
                                } else if i == chars.len() {
                                    true
                                } else {
                                    false
                                }
                            } else {
                                false
                            };
                        let tokens_len = tokens.len();
                        if is_valid {
                            // collapse consecutive AnyRecursiveSequence to a single one
                            // NOTE(review): `tokens_len > 1` skips the collapse when the
                            // only token so far is a leading AnyRecursiveSequence (e.g.
                            // "**/**/..."), pushing a duplicate. Harmless — consecutive
                            // recursive wildcards match the same inputs — but
                            // `tokens_len >= 1` would match the stated intent; confirm.
                            if !(tokens_len > 1 && tokens[tokens_len - 1] == AnyRecursiveSequence) {
                                is_recursive = true;
                                tokens.push(AnyRecursiveSequence);
                            }
                        } else {
                            // treat invalid sequences literally
                            tokens.push(Char('*'));
                            tokens.push(Char('*'));
                        }
                    } else {
                        tokens.push(AnySequence);
                    }
                }
                '[' => {
                    // `[!...]` needs at least four chars (`[!x]`) and a closing `]`
                    // searched for starting *after* position i+2, so a `]` right
                    // after `[!` is treated as a set member
                    if i <= chars.len() - 4 && chars[i + 1] == '!' {
                        match chars.slice_from(i + 3).position_elem(&']') {
                            None => (),
                            Some(j) => {
                                let chars = chars.slice(i + 2, i + 3 + j);
                                let cs = parse_char_specifiers(chars);
                                tokens.push(AnyExcept(cs));
                                i += j + 4;
                                continue;
                            }
                        }
                    }
                    // `[...]` needs at least three chars (`[x]`); same `]` rule
                    else if i <= chars.len() - 3 && chars[i + 1] != '!' {
                        match chars.slice_from(i + 2).position_elem(&']') {
                            None => (),
                            Some(j) => {
                                let cs = parse_char_specifiers(chars.slice(i + 1, i + 2 + j));
                                tokens.push(AnyWithin(cs));
                                i += j + 3;
                                continue;
                            }
                        }
                    }
                    // if we get here then this is not a valid range pattern
                    tokens.push(Char('['));
                    i += 1;
                }
                c => {
                    tokens.push(Char(c));
                    i += 1;
                }
            }
        }
        Pattern { tokens: tokens, is_recursive: is_recursive }
    }
    /// Escape metacharacters within the given string by surrounding them in
    /// brackets. The resulting string will, when compiled into a `Pattern`,
    /// match the input string and nothing else.
    pub fn escape(s: &str) -> String {
        let mut escaped = String::new();
        for c in s.chars() {
            match c {
                // note that ! does not need escaping because it is only special inside brackets
                '?' | '*' | '[' | ']' => {
                    escaped.push('[');
                    escaped.push(c);
                    escaped.push(']');
                }
                c => {
                    escaped.push(c);
                }
            }
        }
        escaped
    }
    /// Return if the given `str` matches this `Pattern` using the default
    /// match options (i.e. `MatchOptions::new()`).
    ///
    /// # Example
    ///
    /// ```rust
    /// use glob::Pattern;
    ///
    /// assert!(Pattern::new("c?t").matches("cat"));
    /// assert!(Pattern::new("k[!e]tteh").matches("kitteh"));
    /// assert!(Pattern::new("d*g").matches("doog"));
    /// ```
    pub fn matches(&self, str: &str) -> bool {
        self.matches_with(str, &MatchOptions::new())
    }
    /// Return if the given `Path`, when converted to a `str`, matches this `Pattern`
    /// using the default match options (i.e. `MatchOptions::new()`).
    pub fn matches_path(&self, path: &Path) -> bool {
        // FIXME (#9639): This needs to handle non-utf8 paths
        path.as_str().map_or(false, |s| {
            self.matches(s)
        })
    }
    /// Return if the given `str` matches this `Pattern` using the specified match options.
    pub fn matches_with(&self, str: &str, options: &MatchOptions) -> bool {
        self.matches_from(None, str, 0, options) == Match
    }
    /// Return if the given `Path`, when converted to a `str`, matches this `Pattern`
    /// using the specified match options.
    pub fn matches_path_with(&self, path: &Path, options: &MatchOptions) -> bool {
        // FIXME (#9639): This needs to handle non-utf8 paths
        path.as_str().map_or(false, |s| {
            self.matches_with(s, options)
        })
    }
    // Recursive backtracking matcher: tries to match `file` against the
    // token stream starting at token index `i`; `prev_char` is the input
    // character consumed just before `file` (None at the very start).
    fn matches_from(&self,
                    prev_char: Option<char>,
                    mut file: &str,
                    i: uint,
                    options: &MatchOptions) -> MatchResult {
        // a Cell lets the `require_literal` closure read prev_char while the
        // loop below keeps updating it
        let prev_char = Cell::new(prev_char);
        // true when `c` may only be matched by a literal pattern character:
        // a separator under require_literal_separator, or a `.` that begins
        // a path component under require_literal_leading_dot
        let require_literal = |&: c| {
            (options.require_literal_separator && is_sep(c)) ||
            (options.require_literal_leading_dot && c == '.'
             && is_sep(prev_char.get().unwrap_or('/')))
        };
        for (ti, token) in self.tokens.slice_from(i).iter().enumerate() {
            match *token {
                AnySequence | AnyRecursiveSequence => {
                    // try the rest of the pattern against every suffix of
                    // `file`, consuming one character per failed attempt
                    loop {
                        match self.matches_from(prev_char.get(), file, i + ti + 1, options) {
                            SubPatternDoesntMatch => (), // keep trying
                            m => return m,
                        }
                        let (c, next) = match file.slice_shift_char() {
                            None => return EntirePatternDoesntMatch,
                            Some(pair) => pair
                        };
                        // only plain `*` is stopped by literal-only characters;
                        // `**` may consume separators and leading dots too
                        if let AnySequence = *token {
                            if require_literal(c) {
                                return SubPatternDoesntMatch;
                            }
                        }
                        prev_char.set(Some(c));
                        file = next;
                    }
                }
                _ => {
                    let (c, next) = match file.slice_shift_char() {
                        None => return EntirePatternDoesntMatch,
                        Some(pair) => pair
                    };
                    let matches = match *token {
                        AnyChar => {
                            !require_literal(c)
                        }
                        AnyWithin(ref specifiers) => {
                            !require_literal(c) &&
                                in_char_specifiers(specifiers.as_slice(),
                                                   c,
                                                   options)
                        }
                        AnyExcept(ref specifiers) => {
                            !require_literal(c) &&
                                !in_char_specifiers(specifiers.as_slice(),
                                                    c,
                                                    options)
                        }
                        Char(c2) => {
                            chars_eq(c, c2, options.case_sensitive)
                        }
                        AnySequence | AnyRecursiveSequence => {
                            // handled exhaustively by the arm above
                            unreachable!()
                        }
                    };
                    if !matches {
                        return SubPatternDoesntMatch;
                    }
                    prev_char.set(Some(c));
                    file = next;
                }
            }
        }
        // pattern exhausted: a match only if the input is exhausted too
        if file.is_empty() {
            Match
        } else {
            SubPatternDoesntMatch
        }
    }
}
// Fills `todo` with paths under `path` to be matched by `patterns[idx]`,
// special-casing patterns to match `.` and `..`, and avoiding `readdir()`
// calls when there are no metacharacters in the pattern.
fn fill_todo(todo: &mut Vec<(Path, uint)>, patterns: &[Pattern], idx: uint, path: &Path,
             options: &MatchOptions) {
    // convert a pattern that's just many Char(_) to a string
    fn pattern_as_str(pattern: &Pattern) -> Option<String> {
        let mut s = String::new();
        for token in pattern.tokens.iter() {
            match *token {
                Char(c) => s.push(c),
                _ => return None
            }
        }
        return Some(s);
    }
    // queue `next_path` for the following pattern component, or mark it as
    // fully matched with the -1 sentinel when this was the last component
    let add = |&: todo: &mut Vec<_>, next_path: Path| {
        if idx + 1 == patterns.len() {
            // We know it's good, so don't make the iterator match this path
            // against the pattern again. In particular, it can't match
            // . or .. globs since these never show up as path components.
            todo.push((next_path, -1 as uint));
        } else {
            fill_todo(todo, patterns, idx + 1, &next_path, options);
        }
    };
    let pattern = &patterns[idx];
    match pattern_as_str(pattern) {
        Some(s) => {
            // This pattern component doesn't have any metacharacters, so we
            // don't need to read the current directory to know where to
            // continue. So instead of passing control back to the iterator,
            // we can just check for that one entry and potentially recurse
            // right away.
            let special = "." == s.as_slice() || ".." == s.as_slice();
            let next_path = path.join(s.as_slice());
            if (special && path.is_dir()) || (!special && next_path.exists()) {
                add(todo, next_path);
            }
        },
        None => {
            match list_dir_sorted(path) {
                Some(entries) => {
                    // every directory entry becomes a candidate for this pattern
                    todo.extend(entries.into_iter().map(|x|(x, idx)));
                    // Matching the special directory entries . and .. that refer to
                    // the current and parent directory respectively requires that
                    // the pattern has a leading dot, even if the `MatchOptions` field
                    // `require_literal_leading_dot` is not set.
                    if pattern.tokens.len() > 0 && pattern.tokens[0] == Char('.') {
                        for &special in [".", ".."].iter() {
                            if pattern.matches_with(special, options) {
                                add(todo, path.join(special));
                            }
                        }
                    }
                }
                // unreadable directory: nothing to enqueue
                None => {}
            }
        }
    }
}
/// Parse the interior of a `[...]` / `[!...]` set into specifiers: any
/// `a-z` triple becomes a `CharRange`, everything else a `SingleChar`
/// (so a `-` at the start or end is taken literally).
fn parse_char_specifiers(s: &[char]) -> Vec<CharSpecifier> {
    let mut specifiers = Vec::new();
    let mut idx = 0;
    while idx < s.len() {
        // a range needs three chars with '-' in the middle
        let is_range = idx + 3 <= s.len() && s[idx + 1] == '-';
        if is_range {
            specifiers.push(CharRange(s[idx], s[idx + 2]));
            idx += 3;
        } else {
            specifiers.push(SingleChar(s[idx]));
            idx += 1;
        }
    }
    specifiers
}
// Returns whether `c` is accepted by any of the given character specifiers,
// honouring `options.case_sensitive` for ASCII alphabetic ranges.
fn in_char_specifiers(specifiers: &[CharSpecifier], c: char, options: &MatchOptions) -> bool {
    for &specifier in specifiers.iter() {
        match specifier {
            SingleChar(sc) => {
                if chars_eq(c, sc, options.case_sensitive) {
                    return true;
                }
            }
            CharRange(start, end) => {
                // FIXME: work with non-ascii chars properly (issue #1347)
                if !options.case_sensitive && c.is_ascii() && start.is_ascii() && end.is_ascii() {
                    let start = start.to_ascii_lowercase();
                    let end = end.to_ascii_lowercase();
                    let start_up = start.to_uppercase();
                    let end_up = end.to_uppercase();
                    // only allow case insensitive matching when
                    // both start and end are within a-z or A-Z
                    // (a letter differs from its own uppercase form)
                    if start != start_up && end != end_up {
                        let c = c.to_ascii_lowercase();
                        if c >= start && c <= end {
                            return true;
                        }
                    }
                }
                // fall through to an exact, case-sensitive range check; this
                // also covers non-alphabetic and non-ascii range bounds
                if c >= start && c <= end {
                    return true;
                }
            }
        }
    }
    false
}
/// A helper function to determine if two chars are (possibly case-insensitively) equal.
fn chars_eq(a: char, b: char, case_sensitive: bool) -> bool {
    // on windows, / and \ are both path separators and compare equal
    if cfg!(windows) && path::windows::is_sep(a) && path::windows::is_sep(b) {
        return true;
    }
    if !case_sensitive && a.is_ascii() && b.is_ascii() {
        // FIXME: work with non-ascii chars properly (issue #9084)
        return a.to_ascii_lowercase() == b.to_ascii_lowercase();
    }
    a == b
}
/// Configuration options to modify the behaviour of `Pattern::matches_with(..)`
#[allow(missing_copy_implementations)]
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct MatchOptions {
    /// Whether or not patterns should be matched in a case-sensitive manner. This
    /// currently only considers upper/lower case relationships between ASCII characters,
    /// but in future this might be extended to work with Unicode.
    pub case_sensitive: bool,
    /// If this is true then path-component separator characters (e.g. `/` on Posix)
    /// must be matched by a literal `/`, rather than by `*` or `?` or `[...]`.
    pub require_literal_separator: bool,
    /// If this is true then paths that contain components that start with a `.` will
    /// not match unless the `.` appears literally in the pattern: `*`, `?` or `[...]`
    /// will not match. This is useful because such files are conventionally considered
    /// hidden on Unix systems and it might be desirable to skip them when listing files.
    pub require_literal_leading_dot: bool
}
impl MatchOptions {
    /// Constructs a new `MatchOptions` with default field values. This is used
    /// when calling functions that do not take an explicit `MatchOptions` parameter.
    ///
    /// This function always returns this value:
    ///
    /// ```rust,ignore
    /// MatchOptions {
    ///     case_sensitive: true,
    ///     require_literal_separator: false,
    ///     require_literal_leading_dot: false
    /// }
    /// ```
    pub fn new() -> MatchOptions {
        MatchOptions {
            case_sensitive: true,
            require_literal_separator: false,
            require_literal_leading_dot: false
        }
    }
}
#[cfg(test)]
mod test {
use std::os;
use super::{glob, Pattern, MatchOptions};
#[test]
fn test_absolute_pattern() {
// assume that the filesystem is not empty!
assert!(glob("/*").next().is_some());
assert!(glob("//").next().is_some());
// check windows absolute paths with host/device components
let root_with_device = os::getcwd().unwrap().root_path().unwrap().join("*");
// FIXME (#9639): This needs to handle non-utf8 paths
assert!(glob(root_with_device.as_str().unwrap()).next().is_some());
}
#[test]
fn test_wildcards() {
assert!(Pattern::new("a*b").matches("a_b"));
assert!(Pattern::new("a**b").matches("a**b"));
assert!(Pattern::new("a*b*c").matches("abc"));
assert!(!Pattern::new("a*b*c").matches("abcd"));
assert!(Pattern::new("a*b*c").matches("a_b_c"));
assert!(Pattern::new("a*b*c").matches("a___b___c"));
assert!(Pattern::new("abc*abc*abc").matches("abcabcabcabcabcabcabc"));
assert!(!Pattern::new("abc*abc*abc").matches("abcabcabcabcabcabcabca"));
assert!(Pattern::new("a*a*a*a*a*a*a*a*a").matches("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"));
assert!(Pattern::new("a*b[xyz]c*d").matches("abxcdbxcddd"));
}
#[test]
fn test_recursive_wildcards() {
let pat = Pattern::new("some/**/needle.txt");
assert!(pat.matches("some/needle.txt"));
assert!(pat.matches("some/one/needle.txt"));
assert!(pat.matches("some/one/two/needle.txt"));
assert!(pat.matches("some/other/needle.txt"));
assert!(!pat.matches("some/other/notthis.txt"));
// a single ** should be valid, for globs
assert!(Pattern::new("**").is_recursive);
// collapse consecutive wildcards
let pat = Pattern::new("some/**/**/needle.txt");
assert!(pat.matches("some/needle.txt"));
assert!(pat.matches("some/one/needle.txt"));
assert!(pat.matches("some/one/two/needle.txt"));
assert!(pat.matches("some/other/needle.txt"));
assert!(!pat.matches("some/other/notthis.txt"));
// recursive patterns should form a single
// path component with nothing else in them
// otherwise they're treated literally
let pat = Pattern::new("some/**b");
assert!(pat.matches("some/**b"));
assert!(!pat.matches("some/lolb"));
let pat = Pattern::new("some/b**");
assert!(pat.matches("some/b**"));
assert!(!pat.matches("some/bob"));
assert!(!pat.matches("some/bob/lol"));
// ** can begin the pattern
let pat = Pattern::new("**/test");
assert!(pat.matches("one/two/test"));
assert!(pat.matches("one/test"));
assert!(pat.matches("test"));
// /** can begin the pattern
let pat = Pattern::new("/**/test");
assert!(pat.matches("/one/two/test"));
assert!(pat.matches("/one/test"));
assert!(pat.matches("/test"));
assert!(!pat.matches("/one/notthis"));
assert!(!pat.matches("/notthis"));
// more than 2 consecutive wildcards and they're all treated literally
assert!(Pattern::new("a***b").matches("a***b"));
}
#[test]
fn test_lots_of_files() {
// this is a good test because it touches lots of differently named files
glob("/*/*/*/*").skip(10000).next();
}
#[test]
fn test_range_pattern() {
let pat = Pattern::new("a[0-9]b");
for i in range(0u, 10) {
assert!(pat.matches(format!("a{}b", i).as_slice()));
}
assert!(!pat.matches("a_b"));
let pat = Pattern::new("a[!0-9]b");
for i in range(0u, 10) {
assert!(!pat.matches(format!("a{}b", i).as_slice()));
}
assert!(pat.matches("a_b"));
let pats = ["[a-z123]", "[1a-z23]", "[123a-z]"];
for &p in pats.iter() {
let pat = Pattern::new(p);
for c in "abcdefghijklmnopqrstuvwxyz".chars() {
assert!(pat.matches(c.to_string().as_slice()));
}
for c in "ABCDEFGHIJKLMNOPQRSTUVWXYZ".chars() {
let options = MatchOptions {case_sensitive: false, .. MatchOptions::new()};
assert!(pat.matches_with(c.to_string().as_slice(), &options));
}
assert!(pat.matches("1"));
assert!(pat.matches("2"));
assert!(pat.matches("3"));
}
let pats = ["[abc-]", "[-abc]", "[a-c-]"];
for &p in pats.iter() {
let pat = Pattern::new(p);
assert!(pat.matches("a"));
assert!(pat.matches("b"));
assert!(pat.matches("c"));
assert!(pat.matches("-"));
assert!(!pat.matches("d"));
}
let pat = Pattern::new("[2-1]");
assert!(!pat.matches("1"));
assert!(!pat.matches("2"));
assert!(Pattern::new("[-]").matches("-"));
assert!(!Pattern::new("[!-]").matches("-"));
}
#[test]
fn test_unclosed_bracket() {
// unclosed `[` should be treated literally
assert!(Pattern::new("abc[def").matches("abc[def"));
assert!(Pattern::new("abc[!def").matches("abc[!def"));
assert!(Pattern::new("abc[").matches("abc["));
assert!(Pattern::new("abc[!").matches("abc[!"));
assert!(Pattern::new("abc[d").matches("abc[d"));
assert!(Pattern::new("abc[!d").matches("abc[!d"));
assert!(Pattern::new("abc[]").matches("abc[]"));
assert!(Pattern::new("abc[!]").matches("abc[!]"));
}
#[test]
fn test_pattern_matches() {
let txt_pat = Pattern::new("*hello.txt");
assert!(txt_pat.matches("hello.txt"));
assert!(txt_pat.matches("gareth_says_hello.txt"));
assert!(txt_pat.matches("some/path/to/hello.txt"));
assert!(txt_pat.matches("some\\path\\to\\hello.txt"));
assert!(txt_pat.matches("/an/absolute/path/to/hello.txt"));
assert!(!txt_pat.matches("hello.txt-and-then-some"));
assert!(!txt_pat.matches("goodbye.txt"));
let dir_pat = Pattern::new("*some/path/to/hello.txt");
assert!(dir_pat.matches("some/path/to/hello.txt"));
assert!(dir_pat.matches("a/bigger/some/path/to/hello.txt"));
assert!(!dir_pat.matches("some/path/to/hello.txt-and-then-some"));
assert!(!dir_pat.matches("some/other/path/to/hello.txt"));
}
#[test]
fn test_pattern_escape() {
let s = "_[_]_?_*_!_";
assert_eq!(Pattern::escape(s), "_[[]_[]]_[?]_[*]_!_".to_string());
assert!(Pattern::new(Pattern::escape(s).as_slice()).matches(s));
}
#[test]
fn test_pattern_matches_case_insensitive() {
let pat = Pattern::new("aBcDeFg");
let options = MatchOptions {
case_sensitive: false,
require_literal_separator: false,
require_literal_leading_dot: false
};
assert!(pat.matches_with("aBcDeFg", &options));
assert!(pat.matches_with("abcdefg", &options));
assert!(pat.matches_with("ABCDEFG", &options));
assert!(pat.matches_with("AbCdEfG", &options));
}
#[test]
fn test_pattern_matches_case_insensitive_range() {
let pat_within = Pattern::new("[a]");
let pat_except = Pattern::new("[!a]");
let options_case_insensitive = MatchOptions {
case_sensitive: false,
require_literal_separator: false,
require_literal_leading_dot: false
};
let options_case_sensitive = MatchOptions {
case_sensitive: true,
require_literal_separator: false,
require_literal_leading_dot: false
};
assert!(pat_within.matches_with("a", &options_case_insensitive));
assert!(pat_within.matches_with("A", &options_case_insensitive));
assert!(!pat_within.matches_with("A", &options_case_sensitive));
assert!(!pat_except.matches_with("a", &options_case_insensitive));
assert!(!pat_except.matches_with("A", &options_case_insensitive));
assert!(pat_except.matches_with("A", &options_case_sensitive));
}
#[test]
fn test_pattern_matches_require_literal_separator() {
let options_require_literal = MatchOptions {
case_sensitive: true,
require_literal_separator: true,
require_literal_leading_dot: false
};
let options_not_require_literal = MatchOptions {
case_sensitive: true,
require_literal_separator: false,
require_literal_leading_dot: false
};
assert!(Pattern::new("abc/def").matches_with("abc/def", &options_require_literal));
assert!(!Pattern::new("abc?def").matches_with("abc/def", &options_require_literal));
assert!(!Pattern::new("abc*def").matches_with("abc/def", &options_require_literal));
assert!(!Pattern::new("abc[/]def").matches_with("abc/def", &options_require_literal));
assert!(Pattern::new("abc/def").matches_with("abc/def", &options_not_require_literal));
assert!(Pattern::new("abc?def").matches_with("abc/def", &options_not_require_literal));
assert!(Pattern::new("abc*def").matches_with("abc/def", &options_not_require_literal));
assert!(Pattern::new("abc[/]def").matches_with("abc/def", &options_not_require_literal));
}
#[test]
fn test_pattern_matches_require_literal_leading_dot() {
    // matches_with honors require_literal_leading_dot: when set, a leading
    // `.` in a path component must be matched by a literal `.`, never by
    // `?`, `*`, or a `[...]` class.
    let options_require_literal_leading_dot = MatchOptions {
        case_sensitive: true,
        require_literal_separator: false,
        require_literal_leading_dot: true
    };
    let options_not_require_literal_leading_dot = MatchOptions {
        case_sensitive: true,
        require_literal_separator: false,
        require_literal_leading_dot: false
    };
    // (pattern, path, expected result when require_literal_leading_dot is
    // set); every case is expected to match when the option is off.
    let cases = [
        ("*.txt", ".hello.txt", false),
        (".*.*", ".hello.txt", true),
        ("aaa/bbb/*", "aaa/bbb/.ccc", false),
        ("aaa/bbb/*", "aaa/bbb/c.c.c.", true),
        ("aaa/bbb/.*", "aaa/bbb/.ccc", true),
        ("aaa/?bbb", "aaa/.bbb", false),
        ("aaa/[.]bbb", "aaa/.bbb", false),
    ];
    for &(pat, path, expected) in cases.iter() {
        let pattern = Pattern::new(pat);
        assert!(pattern.matches_with(path, &options_not_require_literal_leading_dot));
        assert_eq!(pattern.matches_with(path, &options_require_literal_leading_dot), expected);
    }
}
#[test]
fn test_matches_path() {
    // On windows, (Path::new("a/b").as_str().unwrap() == "a\\b"), so this
    // checks that / and \ are considered equivalent there.
    let pattern = Pattern::new("a/b");
    assert!(pattern.matches_path(&Path::new("a/b")));
}
}
|
extern crate byteorder;
extern crate euclid;
extern crate typed_arena;
extern crate bit_vec;
#[macro_use]
extern crate nom;
#[macro_use]
extern crate error_chain;
extern crate svg; // TODO temp for debugging
use std::borrow::Cow;
use std::io::Write;
use std::str::FromStr;
use std::str;
use std::u8;
use byteorder::{LittleEndian, WriteBytesExt};
use nom::{IResult, Needed, digit, le_i16, le_i32, le_u16, le_u32, le_u8};
pub mod errors;
use errors::{ErrorKind, Result, nom_to_result};
pub mod geom;
pub mod map;
pub mod universe;
pub mod shapeops;
mod vanilladoom;
// FIXME so, this whole file is kind of a mess. i was trying to make the raw binary data available
// for inspection without needing to parse into a whole map object, and that turns out to be
// complicated? designing a wad browsing api is also kinda hard, since... well.
// i'm not entirely sure what i should do to remedy all of this. a Map is fairly heavy-handed, and
// you can still do plenty of interesting stuff without one.
// TODO Doom 64? PlayStation Doom? others?
/// Source ports (engines), each of which accepts a different superset of the
/// vanilla map formats and features.
pub enum SourcePort {
    // TODO distinguish doom 1 and 2, since they have different sets of things? final doom?
    Doom,
    Heretic,
    Hexen,
    Strife,
    Boom,
    // TODO boom plus skybox transfer
    // TODO try to distinguish versions of these...??
    ZDoom,
    GZDoom,
    // I don't know so much about these
    MBF,
    Legacy,
    Eternity,
    Vavoom,
}
/// The commercial game a WAD is built on (i.e. which IWAD it expects), if any.
pub enum BaseGame {
    None, // i.e. this IS a base game, or is a TC, or whatever
    Doom1, // TODO distinguish from ultimate doom?
    Doom2,
    TNTEvilution,
    Plutonia,
    Heretic,
    Hexen,
    Strife,
}
/// A map's marker name as it appears in a WAD: either episodic (`ExMy`,
/// Doom 1 style) or numbered (`MAPxx`, Doom 2 style).
#[derive(Debug)]
pub enum MapName {
    ExMy(u8, u8),
    MAPxx(u8),
}
impl std::fmt::Display for MapName {
    /// Formats the name the way it appears as a WAD marker lump:
    /// `E1M1`-style for episodic names, zero-padded `MAP01`-style otherwise.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match *self {
            MapName::ExMy(episode, map) => write!(f, "E{}M{}", episode, map),
            MapName::MAPxx(number) => write!(f, "MAP{:02}", number),
        }
    }
}
/// The binary layout family a map's lumps are stored in.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum MapFormat {
    Doom,
    Hexen,
    UDMF,
}
// Parses an episodic map name like "E1M2".  The eof! requires the whole
// input to be consumed, so e.g. "E1M2FOO" is rejected.
named!(exmy_map_name(&[u8]) -> MapName, do_parse!(
    tag!(b"E") >>
    e: digit >>
    tag!(b"M") >>
    m: digit >>
    eof!() >>
    (MapName::ExMy(
        // We already know e and m are digits, so this is all safe
        // NOTE(review): `digit` matches a run of any length, so a value over
        // 255 would make from_str fail and this unwrap panic — confirm.
        u8::from_str(str::from_utf8(e).unwrap()).unwrap(),
        u8::from_str(str::from_utf8(m).unwrap()).unwrap(),
    ))
));
// Parses a numbered map name like "MAP01".
named!(mapxx_map_name(&[u8]) -> MapName, do_parse!(
    tag!(b"MAP") >>
    xx: digit >>
    eof!() >>
    (MapName::MAPxx(
        // TODO need to enforce that xx is exactly two digits! and also in [1, 32]
        u8::from_str(str::from_utf8(xx).unwrap()).unwrap()
    ))
));
// Either style of vanilla map name; used to spot map marker lumps in a WAD.
named!(vanilla_map_name(&[u8]) -> MapName, alt!(exmy_map_name | mapxx_map_name));
/// Whether a WAD is a standalone game (IWAD) or a patch/mod (PWAD).
#[derive(Copy, Clone, Debug)]
pub enum WADType {
    IWAD,
    PWAD,
}
// TODO do... these
/// Marker trait for archive containers (WADs, eventually other formats).
trait Archive {
    // TODO needs a feature atm -- const supports_duplicate_names: bool;
}
/// Marker trait for WAD-specific archive behavior; not yet fleshed out.
trait WAD {
}
// TODO this should be able to contain an arbitrarily-typed entry, right? but when does the type
// detection happen?
/// A single named lump.  Cow lets an entry either borrow from a parsed
/// buffer or own freshly-built data.
pub struct WADEntry<'a> {
    pub name: Cow<'a, str>,
    pub data: Cow<'a, [u8]>,
}
/// High-level interface to a WAD archive. Does its best to prevent you from producing an invalid
/// WAD. This is probably what you want.
#[allow(dead_code)]
pub struct WADArchive<'a> {
    // TODO it would be nice if we could take ownership of the slice somehow, but i don't know how
    // to do that really. i also don't know how to tell rust that the entry slices are owned by
    // this buffer?
    // Raw bytes of the entire WAD file; entries borrow from this.
    buffer: &'a [u8],
    /// Type of the WAD, either an IWAD (full standalone game) or PWAD (patch wad, a small mod).
    pub wadtype: WADType,
    // Pairs of (name, data)
    entries: Vec<WADEntry<'a>>,
}
impl<'a> Archive for WADArchive<'a> {
}
impl<'a> WADArchive<'a> {
    // TODO:
    // first_entry(name)
    // iter_entry(name)
    // iter_between(_start, _end)
    // iter_maps()
    // iter_flats()
    // TODO interesting things:
    // - find suspected markers that contain data
}
// TODO:
// - get entry by name (REMEMBER: NAMES CAN REPEAT)
// - detect entry type (using name as a hint as well)
// - how the hell do i iterate over /either/ an entry or a map? maybe that's not even a useful thing to do
// things to check:
// - error: lump outside the bounds of the wad
// - warning: lump overlaps the directory
// - warning: lumps overlap
// - warning: lumps not in the same order physically as in the listing
// - interesting: lumps have gaps
/// Low-level interface to a parsed WAD. This is really only useful for, uh, shenanigans.
pub struct BareWAD<'n> {
    // The entire WAD file; directory entries index into this buffer.
    pub buffer: &'n [u8],
    pub header: BareWADHeader,
    pub directory: Vec<BareWADDirectoryEntry<'n>>,
}
// TODO expand these into separate types, probably, so the severity can be an associated value...
// either that or use a method with a big ol match block Ugh
/// Structural problems that `BareWAD::diagnose` is intended to report.
pub enum Diagnostic {
    InvalidLumpBounds(usize, usize),
    LumpOverlapsIndex,
    LumpsOverlap,
    LumpsOutOfOrder,
    UnusedSpace,
}
impl<'n> BareWAD<'n> {
    /// Scans the WAD for structural problems.  Currently a stub that always
    /// returns an empty list.
    pub fn diagnose(&self) -> Vec<Diagnostic> {
        // TODO this
        Vec::new()
    }
    /// Converts this low-level view into a `WADArchive`.  Nothing is copied;
    /// every produced entry borrows from the original buffer.
    pub fn to_archive(&self) -> WADArchive {
        let entries = self.directory.iter()
            .map(|bare_entry| WADEntry{
                name: Cow::from(bare_entry.name),
                data: Cow::from(bare_entry.extract_slice(self.buffer))
            })
            .collect();
        WADArchive{
            buffer: self.buffer,
            wadtype: self.header.identification,
            entries,
        }
    }
    /// Returns the raw bytes of the entry at `index`.
    ///
    /// Panics if `index` is out of range or the entry's recorded offsets lie
    /// outside the buffer.
    pub fn entry_slice(&self, index: usize) -> &[u8] {
        // Reuse the entry's own slicing helper instead of duplicating the
        // filepos/size arithmetic here.
        self.directory[index].extract_slice(self.buffer)
    }
    /// Returns the raw bytes of the first entry with the given name, if any.
    /// WAD entry names may repeat; later duplicates are ignored.
    pub fn first_entry(&self, name: &str) -> Option<&[u8]> {
        // TODO what should this do if the offsets are bogus?
        self.directory.iter()
            .find(|entry| entry.name == name)
            .map(|entry| entry.extract_slice(self.buffer))
    }
    /// Iterates over entries strictly between the given marker lumps; the
    /// markers themselves are not yielded.
    pub fn iter_entries_between(&self, begin_marker: &'n str, end_marker: &'n str) -> BareWADBetweenIterator {
        BareWADBetweenIterator{
            bare_wad: self,
            entry_iter: self.directory.iter(),
            begin_marker,
            end_marker,
            between_markers: false,
        }
    }
    /// Iterates over the maps in this WAD, yielding one index block per map.
    pub fn iter_maps(&self) -> WADMapIterator {
        WADMapIterator{ archive: self, entry_iter: self.directory.iter().enumerate().peekable() }
    }
}
/// Iterator over the entries between two marker lumps; see
/// `BareWAD::iter_entries_between`.
pub struct BareWADBetweenIterator<'a> {
    bare_wad: &'a BareWAD<'a>,
    entry_iter: std::slice::Iter<'a, BareWADDirectoryEntry<'a>>,
    begin_marker: &'a str,
    end_marker: &'a str,
    // True after passing the begin marker and until the end marker is hit.
    between_markers: bool,
}
impl<'a> Iterator for BareWADBetweenIterator<'a> {
    type Item = WADEntry<'a>;
    /// Yields the next entry lying between the begin and end markers.  The
    /// marker lumps themselves are skipped, and marker state persists across
    /// calls (so multiple marker pairs all contribute entries).
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let entry = match self.entry_iter.next() {
                Some(entry) => entry,
                // Ran out of directory entries entirely.
                None => { return None; }
            };
            if entry.name == self.begin_marker {
                self.between_markers = true;
            }
            else if entry.name == self.end_marker {
                self.between_markers = false;
            }
            else if self.between_markers {
                return Some(WADEntry{
                    name: Cow::from(entry.name),
                    data: Cow::from(entry.extract_slice(self.bare_wad.buffer)),
                });
            }
        }
    }
}
/// The 12-byte WAD header: a type tag plus the size and location of the
/// directory (lump index).
pub struct BareWADHeader {
    pub identification: WADType,
    pub numlumps: u32,
    pub infotableofs: u32,
}
// Magic-byte parsers for the two possible WAD type tags.
named!(iwad_tag<WADType>, value!(WADType::IWAD, tag!(b"IWAD")));
named!(pwad_tag<WADType>, value!(WADType::PWAD, tag!(b"PWAD")));
// Parses the WAD header: a 4-byte magic followed by two little-endian u32s.
named!(wad_header<BareWADHeader>, do_parse!(
    identification: return_error!(
        nom::ErrorKind::Custom(1),
        alt!(iwad_tag | pwad_tag)) >>
    numlumps: le_u32 >>
    infotableofs: le_u32 >>
    (BareWADHeader{ identification, numlumps, infotableofs })
));
/// One 16-byte directory entry: where a lump lives in the file, how big it
/// is, and its (up to 8 character) name borrowed from the buffer.
#[derive(Debug)]
pub struct BareWADDirectoryEntry<'name> {
    pub filepos: u32,
    pub size: u32,
    pub name: &'name str,
}
impl<'n> BareWADDirectoryEntry<'n> {
    /// Extract the slice described by this entry from a buffer.
    /// Panics if the recorded offsets fall outside `buf`.
    pub fn extract_slice<'b>(&self, buf: &'b [u8]) -> &'b [u8] {
        let begin = self.filepos as usize;
        let end = begin + self.size as usize;
        &buf[begin..end]
    }
}
/// Parses a fixed-size, NUL-padded ASCII name field (used for lump and
/// texture names), consuming exactly `len` bytes of input.
///
/// A NUL ends the name early; whatever padding follows is skipped.  Any byte
/// outside the printable ASCII range is a parse error.
fn fixed_length_ascii(input: &[u8], len: usize) -> IResult<&[u8], &str> {
    if input.len() < len {
        return IResult::Incomplete(Needed::Size(len));
    }
    for i in 0..len {
        match input[i] {
            0 => {
                // This is the end
                // SAFETY: every byte before i was verified printable ASCII,
                // which is always valid UTF-8.
                let s = unsafe { str::from_utf8_unchecked(&input[..i]) };
                return IResult::Done(&input[len..], s);
            }
            32 ... 126 => {
                // Printable ASCII; OK.  NOTE: the upper bound was previously
                // 128, but 0x80 is not ASCII (it's a UTF-8 continuation
                // byte), so accepting it made the from_utf8_unchecked calls
                // below unsound.
            }
            _ => {
                // Totally bogus character
                return IResult::Error(nom::Err::Code(nom::ErrorKind::Custom(0)));
            }
        }
    }
    // SAFETY: all len bytes were verified printable ASCII above.
    IResult::Done(&input[len..], unsafe { str::from_utf8_unchecked(&input[..len]) })
}
// Parses a single 16-byte directory entry: offset, size, 8-byte padded name.
// NOTE(review): dbg_dmp! dumps the input to stderr when the inner parser
// fails — looks like a leftover debugging aid; confirm it's wanted.
named!(wad_entry<BareWADDirectoryEntry>, dbg_dmp!(do_parse!(
    filepos: le_u32 >>
    size: le_u32 >>
    name: apply!(fixed_length_ascii, 8) >>
    (BareWADDirectoryEntry{ filepos, size, name })
)));
/// Parses the WAD directory (the table of lump offsets/sizes/names) that
/// `header` points at, out of the full file buffer.
fn wad_directory<'a>(buf: &'a [u8], header: &BareWADHeader) -> IResult<&'a [u8], Vec<BareWADDirectoryEntry<'a>>> {
    let lumpct = header.numlumps as usize;
    let offset = header.infotableofs as usize;
    // TODO can i unhardcode the size of a wad entry here?
    // Each directory entry is 16 bytes: filepos (4) + size (4) + name (8).
    // The header fields are untrusted file data, so do the size math checked;
    // a hostile numlumps/infotableofs must not overflow usize.
    let tablelen = match lumpct.checked_mul(16) {
        Some(n) => n,
        None => { return IResult::Incomplete(Needed::Unknown); }
    };
    let table_end = match offset.checked_add(tablelen) {
        Some(n) => n,
        None => { return IResult::Incomplete(Needed::Unknown); }
    };
    if buf.len() < table_end {
        return IResult::Incomplete(Needed::Size(tablelen));
    }
    // lumpct is bounded by the check above (16 * lumpct <= buf.len()), so
    // this allocation can't be absurdly large.
    let mut ret = Vec::with_capacity(lumpct);
    let mut parse_from = &buf[offset..];
    for _ in 0..lumpct {
        let (leftovers, entry) = try_parse!(parse_from, wad_entry);
        ret.push(entry);
        parse_from = leftovers;
    }
    IResult::Done(parse_from, ret)
}
// TODO problems to scan a wad map for:
// - missing a required lump
// TODO curiosities to scan a wad map for:
// - content in ENDMAP
// - UDMF along with the old-style text maps
// FIXME use Result, but, figure out how to get an actual error out of here
// FIXME actually this fairly simple format is a good place to start thinking about how to return errors in general; like, do i want custom errors for tags? etc
/// Parses an entire WAD file into its raw structure: the header plus the
/// directory of entries, everything borrowing from `buf`.
pub fn parse_wad(buf: &[u8]) -> Result<BareWAD> {
    // FIXME ambiguous whether the error was from parsing the header or the entries
    let header = nom_to_result("wad header", buf, wad_header(buf))?;
    // TODO buf is not actually the right place here
    let directory = nom_to_result("wad index", buf, wad_directory(buf, &header))?;
    Ok(BareWAD{ buffer: buf, header, directory })
}
// -----------------------------------------------------------------------------
// Map stuff
// Standard lumps and whether they're required
/// The lumps making up a vanilla map, in their canonical order after the map
/// marker, paired with whether each one is required.
const MAP_LUMP_ORDER: [(&str, bool); 11] = [
    ("THINGS", true),
    ("LINEDEFS", true),
    ("SIDEDEFS", true),
    ("VERTEXES", true),
    ("SEGS", false),
    ("SSECTORS", false),
    ("NODES", false),
    ("SECTORS", true),
    ("REJECT", false),
    ("BLOCKMAP", false),
    ("BEHAVIOR", false),
];
/// Iterator over the maps in a WAD; see `BareWAD::iter_maps`.
#[allow(dead_code)]
pub struct WADMapIterator<'a> {
    archive: &'a BareWAD<'a>,
    // Peekable so the scan can stop at the next map marker without eating it.
    entry_iter: std::iter::Peekable<std::iter::Enumerate<std::slice::Iter<'a, BareWADDirectoryEntry<'a>>>>,
}
impl<'a> Iterator for WADMapIterator<'a> {
    type Item = WADMapEntryBlock;
    /// Finds the next map in the archive: scans forward for a marker lump
    /// whose name parses as a vanilla map name, then records the indices of
    /// the recognized map lumps that follow it.
    fn next(&mut self) -> Option<WADMapEntryBlock> {
        // Scan until an entry name parses as a map marker (E1M1, MAP01, ...).
        let (marker_index, map_name) = loop {
            if let Some((i, entry)) = self.entry_iter.next() {
                if let IResult::Done(_, found_map_name) = vanilla_map_name(entry.name.as_bytes()) {
                    break (i, found_map_name);
                }
            }
            else {
                // Hit the end of the entries
                return None;
            }
        };
        // Start with an empty block; assume Doom format until proven otherwise.
        let mut range = WADMapEntryBlock{
            format: MapFormat::Doom,
            name: map_name,
            marker_index,
            last_index: marker_index,
            things_index: None,
            linedefs_index: None,
            sidedefs_index: None,
            vertexes_index: None,
            segs_index: None,
            ssectors_index: None,
            nodes_index: None,
            sectors_index: None,
            reject_index: None,
            blockmap_index: None,
            behavior_index: None,
            textmap_index: None,
        };
        // Use peeking here, so that if we stumble onto the next map header, we don't consume it
        let (mut i, mut entry) = match self.entry_iter.peek() {
            Some(&next) => next,
            None => { return None; }
        };
        if entry.name == "TEXTMAP" {
            // This is a UDMF map, which has a completely different scheme: it
            // goes until an explicit ENDMAP marker
            range.format = MapFormat::UDMF;
            // TODO continue this logic
        }
        // Walk the canonical lump order, recording the index of each
        // recognized lump and advancing past it.
        for &(lump_name, is_required) in MAP_LUMP_ORDER.iter() {
            // TODO i am pretty sure this is supposed to be case-insensitive?
            if entry.name == lump_name {
                match entry.name {
                    "THINGS" => { range.things_index = Some(i); }
                    "LINEDEFS" => { range.linedefs_index = Some(i); }
                    "SIDEDEFS" => { range.sidedefs_index = Some(i); }
                    "VERTEXES" => { range.vertexes_index = Some(i); }
                    "SEGS" => { range.segs_index = Some(i); }
                    "SSECTORS" => { range.ssectors_index = Some(i); }
                    "NODES" => { range.nodes_index = Some(i); }
                    "SECTORS" => { range.sectors_index = Some(i); }
                    "REJECT" => { range.reject_index = Some(i); }
                    "BLOCKMAP" => { range.blockmap_index = Some(i); }
                    "BEHAVIOR" => {
                        range.behavior_index = Some(i);
                        // The presence of a BEHAVIOR lump is the sole indication of Hexen format
                        range.format = MapFormat::Hexen;
                    }
                    "TEXTMAP" => { range.textmap_index = Some(i); }
                    _ => {
                        // TODO wait, what's the right thing here
                        break;
                    }
                }
                self.entry_iter.next();
                match self.entry_iter.peek() {
                    Some(&(next_i, next_entry)) => {
                        i = next_i;
                        entry = next_entry;
                    }
                    None => {
                        // FIXME this needs to check whether there are any
                        // /required/ lumps not yet seen, ugh
                        break;
                    }
                }
            }
            else if is_required {
                // FIXME return a better error: expected lump X, found Y
                // FIXME should this really stop us from iterating over any further maps?
                // TODO should we try to cleverly detect what happened here? what DID happen here, anyway?
                // TODO maybe we should return what we have so far, and let the conversion to a real map take care of it? but then how do we handle missing only one lump (do we grab the rest)? what about duplicate lumps?
                // TODO same questions go for the places i used try!(), except i think i got the logic even worse there, idk. write some tests
                return None;
            }
        }
        range.last_index = i;
        Some(range)
    }
}
/// The set of directory indices making up one map: the marker lump plus
/// whichever standard lumps were found after it.  Produced by
/// `WADMapIterator`; indices refer back into the WAD's directory.
#[derive(Debug)]
pub struct WADMapEntryBlock {
    pub format: MapFormat,
    pub name: MapName, // TODO what are the rules in zdoom? can you really use any map name?
    // Index of the map marker lump itself.
    pub marker_index: usize,
    // Index of the last lump belonging to this map.
    pub last_index: usize,
    pub things_index: Option<usize>,
    pub linedefs_index: Option<usize>,
    pub sidedefs_index: Option<usize>,
    pub vertexes_index: Option<usize>,
    pub segs_index: Option<usize>,
    pub ssectors_index: Option<usize>,
    pub nodes_index: Option<usize>,
    pub sectors_index: Option<usize>,
    pub reject_index: Option<usize>,
    pub blockmap_index: Option<usize>,
    pub behavior_index: Option<usize>,
    pub textmap_index: Option<usize>,
    // TODO endmap
}
// TODO map parsing requires:
// - come up with some way to treat a map as a single unit in a wad (is there anything else that acts this way?)
// - parsers for:
// - THINGS
// - LINEDEFS
// - SIDEDEFS
// - SEGS
// - SSECTORS (deferrable)
// - NODES (deferrable)
// - SECTORS
// - REJECT (deferrable)
// - BLOCKMAP (deferrable)
// - put all this in its own module/hierarchy
// Parses the five-byte argument array used by Hexen-format things and lines.
named!(hexen_args<[u8; 5]>, count_fixed!(u8, le_u8, 5));
/// One THINGS record in Doom format (10 bytes on disk).
#[derive(Debug)]
pub struct BareDoomThing {
    pub x: i16,
    pub y: i16,
    // TODO what is this
    pub angle: i16,
    // Editor number: identifies which kind of thing this is.
    pub doomednum: i16,
    // NOTE: boom added two flags, and mbf one more, so this is a decent signal for targeting those (but not 100%)
    pub flags: u16,
}
impl BareDoomThing {
    /// Serializes this thing back into the 10-byte on-disk THINGS record,
    /// little-endian, in the same field order the parser reads.
    pub fn write_to(&self, writer: &mut Write) -> Result<()> {
        // The first four fields are all i16; emit them in record order.
        for &field in [self.x, self.y, self.angle, self.doomednum].iter() {
            writer.write_i16::<LittleEndian>(field)?;
        }
        writer.write_u16::<LittleEndian>(self.flags)?;
        Ok(())
    }
}
// TODO totally different in hexen
// Parses an entire THINGS lump (Doom format) as consecutive 10-byte records.
named!(doom_things_lump<Vec<BareDoomThing>>, terminated!(many0!(do_parse!(
    x: le_i16 >>
    y: le_i16 >>
    angle: le_i16 >>
    doomednum: le_i16 >>
    flags: le_u16 >>
    (BareDoomThing{ x, y, angle, doomednum, flags })
)), eof!()));
/// One THINGS record in Hexen format (20 bytes on disk); adds a thing id,
/// a z height, and a special with its argument bytes.
#[derive(Debug)]
pub struct BareHexenThing {
    // TODO is this really signed in hexen?
    pub tid: i16,
    pub x: i16,
    pub y: i16,
    pub z: i16,
    // TODO what is this
    pub angle: i16,
    pub doomednum: i16,
    pub flags: u16,
    pub special: u8,
    pub args: [u8; 5],
}
// TODO totally different in hexen
// Parses an entire THINGS lump (Hexen format) as consecutive 20-byte records.
named!(hexen_things_lump<Vec<BareHexenThing>>, terminated!(many0!(do_parse!(
    tid: le_i16 >>
    x: le_i16 >>
    y: le_i16 >>
    z: le_i16 >>
    angle: le_i16 >>
    doomednum: le_i16 >>
    flags: le_u16 >>
    special: le_u8 >>
    args: hexen_args >>
    (BareHexenThing{
        tid,
        x,
        y,
        z,
        angle,
        doomednum,
        flags,
        special,
        args,
    })
)), eof!()));
/// Accessors shared by the Doom and Hexen thing formats, so generic map code
/// can handle either.
pub trait BareBinaryThing {
    fn coords(&self) -> (i16, i16);
    fn doomednum(&self) -> i16;
}
impl BareBinaryThing for BareDoomThing {
    fn coords(&self) -> (i16, i16) {
        (self.x, self.y)
    }
    fn doomednum(&self) -> i16 {
        self.doomednum
    }
}
impl BareBinaryThing for BareHexenThing {
    fn coords(&self) -> (i16, i16) {
        (self.x, self.y)
    }
    fn doomednum(&self) -> i16 {
        self.doomednum
    }
}
// FIXME vertex/sidedef indices are i16 in vanilla, but extended to u16 in most source ports; note that for true vanilla, a negative index makes no sense anyway
// FIXME hexen extends this, which requires detecting hexen format
// FIXME what exactly is the higher-level structure that holds actual references to the sidedefs?
/// One LINEDEFS record in Doom format (14 bytes on disk).  Vertex and
/// sidedef fields are indices into the map's VERTEXES/SIDEDEFS lumps.
#[derive(Debug)]
pub struct BareDoomLine {
    pub v0: i16,
    pub v1: i16,
    pub flags: i16,
    pub special: i16,
    pub sector_tag: i16,
    // NOTE: -1 to mean none
    pub front_sidedef: i16,
    pub back_sidedef: i16,
}
impl BareDoomLine {
    /// Serializes this linedef back into the 14-byte on-disk LINEDEFS
    /// record, little-endian, in the same field order the parser reads.
    pub fn write_to(&self, writer: &mut Write) -> Result<()> {
        // Every field is an i16; emit them in record order.
        let fields = [
            self.v0, self.v1, self.flags, self.special,
            self.sector_tag, self.front_sidedef, self.back_sidedef,
        ];
        for &field in fields.iter() {
            writer.write_i16::<LittleEndian>(field)?;
        }
        Ok(())
    }
}
// Parses an entire LINEDEFS lump (Doom format) as consecutive 14-byte records.
named!(doom_linedefs_lump<Vec<BareDoomLine>>, terminated!(many0!(do_parse!(
    v0: le_i16 >>
    v1: le_i16 >>
    flags: le_i16 >>
    special: le_i16 >>
    sector_tag: le_i16 >>
    front_sidedef: le_i16 >>
    back_sidedef: le_i16 >>
    (BareDoomLine{ v0, v1, flags, special, sector_tag, front_sidedef, back_sidedef })
)), eof!()));
// TODO source ports extended ids to unsigned here too
/// One LINEDEFS record in Hexen format (16 bytes on disk); the sector tag is
/// replaced by a special with a five-byte argument list.
#[derive(Debug)]
pub struct BareHexenLine {
    pub v0: i16,
    pub v1: i16,
    pub flags: i16,
    pub special: u8,
    pub args: [u8; 5],
    // NOTE: -1 to mean none
    pub front_sidedef: i16,
    pub back_sidedef: i16,
}
// Parses an entire LINEDEFS lump (Hexen format) as consecutive 16-byte records.
named!(hexen_linedefs_lump<Vec<BareHexenLine>>, terminated!(many0!(do_parse!(
    v0: le_i16 >>
    v1: le_i16 >>
    flags: le_i16 >>
    special: le_u8 >>
    args: hexen_args >>
    front_sidedef: le_i16 >>
    back_sidedef: le_i16 >>
    (BareHexenLine{
        v0,
        v1,
        flags,
        special,
        args,
        front_sidedef,
        back_sidedef,
    })
)), eof!()));
/// Accessors shared by the Doom and Hexen linedef formats, so generic map
/// code can handle either.
pub trait BareBinaryLine {
    fn vertex_indices(&self) -> (i16, i16);
    fn side_indices(&self) -> (i16, i16);
    fn has_special(&self) -> bool;
    fn flags(&self) -> i16;
}
impl BareBinaryLine for BareDoomLine {
    fn vertex_indices(&self) -> (i16, i16) {
        (self.v0, self.v1)
    }
    fn side_indices(&self) -> (i16, i16) {
        (self.front_sidedef, self.back_sidedef)
    }
    fn has_special(&self) -> bool {
        self.special != 0
    }
    fn flags(&self) -> i16 {
        self.flags
    }
}
impl BareBinaryLine for BareHexenLine {
    fn vertex_indices(&self) -> (i16, i16) {
        (self.v0, self.v1)
    }
    fn side_indices(&self) -> (i16, i16) {
        (self.front_sidedef, self.back_sidedef)
    }
    fn has_special(&self) -> bool {
        self.special != 0
    }
    fn flags(&self) -> i16 {
        self.flags
    }
}
/// One SIDEDEFS record (30 bytes on disk); the texture names borrow from the
/// lump buffer.
#[derive(Debug)]
pub struct BareSide<'a> {
    pub x_offset: i16,
    pub y_offset: i16,
    pub upper_texture: &'a str,
    pub lower_texture: &'a str,
    pub middle_texture: &'a str,
    // Index into the SECTORS lump.
    pub sector: i16,
}
impl<'a> BareSide<'a> {
    /// Serializes this sidedef back into the 30-byte on-disk SIDEDEFS
    /// record.  Texture names are written as 8-byte NUL-padded fields.
    pub fn write_to(&self, writer: &mut Write) -> Result<()> {
        writer.write_i16::<LittleEndian>(self.x_offset)?;
        writer.write_i16::<LittleEndian>(self.y_offset)?;
        // write_all instead of write: Write::write may do a short write and
        // its byte count was previously ignored, which could silently
        // truncate a field.
        // NOTE(review): a name longer than 8 bytes would produce an
        // oversized field here (the padding range is just empty); presumably
        // parsed names can't exceed 8, but this isn't enforced.
        writer.write_all(self.upper_texture.as_bytes())?;
        for _ in self.upper_texture.len() .. 8 {
            writer.write_all(&[0])?;
        }
        writer.write_all(self.lower_texture.as_bytes())?;
        for _ in self.lower_texture.len() .. 8 {
            writer.write_all(&[0])?;
        }
        writer.write_all(self.middle_texture.as_bytes())?;
        for _ in self.middle_texture.len() .. 8 {
            writer.write_all(&[0])?;
        }
        writer.write_i16::<LittleEndian>(self.sector)?;
        Ok(())
    }
}
// FIXME using many0! followed by eof! means that if the parse fails, many0! thinks that's a
// success, stops, and then hits the eof! and fails, which loses the original error and is really
// confusing
// TODO file some tickets on nom:
// - docs for call! are actually for apply!
// - many_till! with eof! gives a bizarre error about being unable to infer a type for E
// - error management guide seems to be pre-2.0; mentions importing from nom::util, which is
// private, and makes no mention of verbose vs simple errors at all
// - also, even with verbose errors, error handling kinda sucks? i'm not even sure why this is
// an enum when it gives me completely useless alternations, some of which (ManyTill) are
// thrown in multiple places
// - seems impossible to use a different error type due to rust's not very good inference rules
// - many_till throws away the underlying error.
// Pins eof! to a concrete IResult type; without the annotation, many_till!
// can't infer its error type E (see the notes above).
macro_rules! typed_eof (
    ($i:expr,) => (
        {
            let res: IResult<_, _> = eof!($i,);
            res
        }
    );
);
// Parses an entire SIDEDEFS lump as consecutive 30-byte records, running
// until end of input; the trailing () from eof! is discarded by map!.
named!(sidedefs_lump<Vec<BareSide>>, map!(many_till!(do_parse!(
    x_offset: le_i16 >>
    y_offset: le_i16 >>
    upper_texture: apply!(fixed_length_ascii, 8) >>
    lower_texture: apply!(fixed_length_ascii, 8) >>
    middle_texture: apply!(fixed_length_ascii, 8) >>
    sector: le_i16 >>
    (BareSide{
        x_offset,
        y_offset,
        upper_texture,
        lower_texture,
        middle_texture,
        sector
    })
), typed_eof!()), |(r, _)| r));
// FIXME: vertices are i16 for vanilla, 15/16 fixed for ps/n64, effectively infinite but really f32 for udmf
/// One VERTEXES record (4 bytes on disk): a 2D point in map units.
#[derive(Debug)]
pub struct BareVertex {
    pub x: i16,
    pub y: i16,
}
impl BareVertex {
    /// Serializes this vertex back into the 4-byte on-disk VERTEXES record,
    /// little-endian, x then y.
    pub fn write_to(&self, writer: &mut Write) -> Result<()> {
        for &coord in [self.x, self.y].iter() {
            writer.write_i16::<LittleEndian>(coord)?;
        }
        Ok(())
    }
}
// Parses an entire VERTEXES lump as consecutive 4-byte records.
named!(vertexes_lump<Vec<BareVertex>>, terminated!(many0!(do_parse!(
    x: le_i16 >>
    y: le_i16 >>
    (BareVertex{ x, y })
)), eof!()));
/// One SECTORS record (26 bytes on disk); the flat (texture) names borrow
/// from the lump buffer.
#[derive(Debug)]
pub struct BareSector<'a> {
    pub floor_height: i16,
    pub ceiling_height: i16,
    pub floor_texture: &'a str,
    pub ceiling_texture: &'a str,
    pub light: i16, // XXX what?? light can only go up to 255!
    pub sector_type: i16, // TODO check if these are actually signed or what
    pub sector_tag: i16,
}
impl<'a> BareSector<'a> {
    /// Serializes this sector back into the 26-byte on-disk SECTORS record.
    /// Flat names are written as 8-byte NUL-padded fields.
    pub fn write_to(&self, writer: &mut Write) -> Result<()> {
        writer.write_i16::<LittleEndian>(self.floor_height)?;
        writer.write_i16::<LittleEndian>(self.ceiling_height)?;
        // write_all instead of write: Write::write may do a short write and
        // its byte count was previously ignored, which could silently
        // truncate a field.
        // NOTE(review): a name longer than 8 bytes would produce an
        // oversized field here (the padding range is just empty); presumably
        // parsed names can't exceed 8, but this isn't enforced.
        writer.write_all(self.floor_texture.as_bytes())?;
        for _ in self.floor_texture.len() .. 8 {
            writer.write_all(&[0])?;
        }
        writer.write_all(self.ceiling_texture.as_bytes())?;
        for _ in self.ceiling_texture.len() .. 8 {
            writer.write_all(&[0])?;
        }
        writer.write_i16::<LittleEndian>(self.light)?;
        writer.write_i16::<LittleEndian>(self.sector_type)?;
        writer.write_i16::<LittleEndian>(self.sector_tag)?;
        Ok(())
    }
}
// Parses an entire SECTORS lump as consecutive 26-byte records.
named!(sectors_lump<Vec<BareSector>>, terminated!(many0!(do_parse!(
    floor_height: le_i16 >>
    ceiling_height: le_i16 >>
    floor_texture: apply!(fixed_length_ascii, 8) >>
    ceiling_texture: apply!(fixed_length_ascii, 8) >>
    light: le_i16 >>
    sector_type: le_i16 >>
    sector_tag: le_i16 >>
    (BareSector{
        floor_height,
        ceiling_height,
        floor_texture,
        ceiling_texture,
        light,
        sector_type,
        sector_tag,
    })
)), eof!()));
/// A fully-parsed binary map, generic over the Doom/Hexen line and thing
/// record types.  Lines and sides refer to vertices/sectors by index into
/// these vectors.
#[derive(Debug)]
pub struct BareBinaryMap<'a, L: BareBinaryLine, T: BareBinaryThing> {
    pub vertices: Vec<BareVertex>,
    pub sectors: Vec<BareSector<'a>>,
    pub sides: Vec<BareSide<'a>>,
    pub lines: Vec<L>,
    pub things: Vec<T>,
}
/// The result of parsing a Doom-format map definition. The contained
/// structures have not been changed in any way. Everything is public, and
/// nothing is preventing you from meddling with the contained data in a way
/// that might make it invalid.
pub type BareDoomMap<'a> = BareBinaryMap<'a, BareDoomLine, BareDoomThing>;
/// The result of parsing a Hexen-format map definition. The contained
/// structures have not been changed in any way. Everything is public, and
/// nothing is preventing you from meddling with the contained data in a way
/// that might make it invalid.
pub type BareHexenMap<'a> = BareBinaryMap<'a, BareHexenLine, BareHexenThing>;
/// A parsed map in whichever binary format it was stored in.
#[derive(Debug)]
pub enum BareMap<'a> {
    Doom(BareDoomMap<'a>),
    Hexen(BareHexenMap<'a>),
}
// TODO much more error handling wow lol
/// Parses the lumps listed in `range` into a `BareMap`, using `range.format`
/// to pick between the Doom and Hexen binary layouts.
///
/// Returns `ErrorKind::MissingMapLump` when a required lump is absent, or a
/// parse error from any individual lump.
// NOTE(review): the else branch treats every non-Doom format — including
// UDMF — as Hexen; confirm callers never pass a UDMF entry block here.
pub fn parse_doom_map<'a>(archive: &'a BareWAD, range: &WADMapEntryBlock) -> Result<BareMap<'a>> {
    // TODO the map being parsed doesn't appear in the returned error... sigh
    // These three lumps have the same layout in both formats, so parse them first.
    let vertexes_index = range.vertexes_index.ok_or(ErrorKind::MissingMapLump("VERTEXES"))?;
    let buf = archive.entry_slice(vertexes_index);
    let vertices = nom_to_result("VERTEXES lump", buf, vertexes_lump(buf))?;
    let sectors_index = range.sectors_index.ok_or(ErrorKind::MissingMapLump("SECTORS"))?;
    let buf = archive.entry_slice(sectors_index);
    let sectors = nom_to_result("SECTORS lump", buf, sectors_lump(buf))?;
    let sidedefs_index = range.sidedefs_index.ok_or(ErrorKind::MissingMapLump("SIDEDEFS"))?;
    let buf = archive.entry_slice(sidedefs_index);
    let sides = nom_to_result("SIDEDEFS lump", buf, sidedefs_lump(buf))?;
    if range.format == MapFormat::Doom {
        let linedefs_index = range.linedefs_index.ok_or(ErrorKind::MissingMapLump("LINEDEFS"))?;
        let buf = archive.entry_slice(linedefs_index);
        let lines = nom_to_result("LINEDEFS lump", buf, doom_linedefs_lump(buf))?;
        let things_index = range.things_index.ok_or(ErrorKind::MissingMapLump("THINGS"))?;
        let buf = archive.entry_slice(things_index);
        let things = nom_to_result("THINGS lump", buf, doom_things_lump(buf))?;
        Ok(BareMap::Doom(BareDoomMap{
            vertices,
            sectors,
            sides,
            lines,
            things,
        }))
    }
    else {
        let linedefs_index = range.linedefs_index.ok_or(ErrorKind::MissingMapLump("LINEDEFS"))?;
        let buf = archive.entry_slice(linedefs_index);
        let lines = nom_to_result("LINEDEFS lump", buf, hexen_linedefs_lump(buf))?;
        let things_index = range.things_index.ok_or(ErrorKind::MissingMapLump("THINGS"))?;
        let buf = archive.entry_slice(things_index);
        let things = nom_to_result("THINGS lump", buf, hexen_things_lump(buf))?;
        Ok(BareMap::Hexen(BareHexenMap{
            vertices,
            sectors,
            sides,
            lines,
            things,
        }))
    }
}
/// Which side of a line a sidedef (and hence an edge) lies on.
#[derive(Copy, Clone, Debug)]
pub enum Facing {
    Front,
    Back,
}
use std::collections::HashMap;
// TODO ok so this is mildly clever but won't work once we get to UDMF champ
impl<'a, L: BareBinaryLine, T: BareBinaryThing> BareBinaryMap<'a, L, T> {
// TODO this is a horrible fucking mess. but it's a /contained/ horrible fucking mess, so.
/// Collects every edge bordering sector `s` and traces them into outlines,
/// returning each outline as a list of vertex references.
///
/// Assumes simple polygons: the walk only follows one new vertex per step
/// (see comments below), so degenerate geometry may produce odd results.
pub fn sector_to_polygons(&self, s: usize) -> Vec<Vec<&BareVertex>> {
    // One use of a line that borders the target sector.
    struct Edge<'a, L: 'a> {
        line: &'a L,
        side: &'a BareSide<'a>,
        facing: Facing,
        v0: &'a BareVertex,
        v1: &'a BareVertex,
        // Marked once the tracing pass has consumed this edge.
        done: bool,
    }
    // This is just to convince HashMap to hash on the actual reference, not the underlying
    // BareVertex value
    struct VertexRef<'a>(&'a BareVertex);
    impl<'a> PartialEq for VertexRef<'a> {
        fn eq(&self, other: &VertexRef) -> bool {
            return (self.0 as *const _) == (other.0 as *const _);
        }
    }
    impl<'a> Eq for VertexRef<'a> {}
    impl<'a> std::hash::Hash for VertexRef<'a> {
        fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
            (self.0 as *const BareVertex).hash(state)
        }
    }
    // Phase 1: gather every edge touching sector s, and index the edges by
    // their endpoints so the tracing walk can hop vertex-to-vertex.
    let mut edges = vec![];
    let mut vertices_to_edges = HashMap::new();
    // TODO linear scan -- would make more sense to turn the entire map into polygons in one go
    for line in self.lines.iter() {
        let (frontid, backid) = line.side_indices();
        // FIXME need to do this better
        // A two-sided line with the same sector on both sides doesn't
        // contribute to that sector's outline; skip it.
        if frontid != -1 && backid != -1 && self.sides[frontid as usize].sector == self.sides[backid as usize].sector {
            continue;
        }
        for &(facing, sideid) in [(Facing::Front, frontid), (Facing::Back, backid)].iter() {
            if sideid == -1 {
                continue;
            }
            // TODO this and the vertices lookups might be bogus and crash...
            let side = &self.sides[sideid as usize];
            if side.sector as usize == s {
                let (v0, v1) = line.vertex_indices();
                let edge = Edge{
                    line,
                    side,
                    facing,
                    // TODO should these be swapped depending on the line facing?
                    v0: &self.vertices[v0 as usize],
                    v1: &self.vertices[v1 as usize],
                    done: false,
                };
                edges.push(edge);
                vertices_to_edges.entry(VertexRef(&self.vertices[v0 as usize])).or_insert(Vec::new()).push(edges.len() - 1);
                vertices_to_edges.entry(VertexRef(&self.vertices[v1 as usize])).or_insert(Vec::new()).push(edges.len() - 1);
            }
        }
    }
    // Trace sectors by starting at the first side's first vertex and attempting to walk from
    // there
    let mut outlines = Vec::new();
    let mut seen_vertices = HashMap::new();
    while edges.len() > 0 {
        // Pick any not-yet-visited endpoint as the start of a new outline.
        let mut next_vertices = vec![];
        for edge in edges.iter() {
            // TODO having done-ness for both edges and vertices seems weird, idk
            if !seen_vertices.contains_key(&VertexRef(edge.v0)) {
                next_vertices.push(edge.v0);
                break;
            }
            if !seen_vertices.contains_key(&VertexRef(edge.v1)) {
                next_vertices.push(edge.v1);
                break;
            }
        }
        // Every vertex already visited: all outlines have been traced.
        if next_vertices.is_empty() {
            break;
        }
        let mut outline = Vec::new();
        // Walk edge-to-edge from the start vertex until the outline closes.
        while next_vertices.len() > 0 {
            let vertices = next_vertices;
            next_vertices = Vec::new();
            for vertex in vertices.iter() {
                if seen_vertices.contains_key(&VertexRef(vertex)) {
                    continue;
                }
                seen_vertices.insert(VertexRef(vertex), true);
                outline.push(*vertex);
                // TODO so, problems occur here if:
                // - a vertex has more than two edges
                //   - special case: double-sided edges are OK! but we have to eliminate
                //     those, WITHOUT ruining entirely self-referencing sectors
                // - a vertex has one edge
                for e in vertices_to_edges.get(&VertexRef(vertex)).unwrap().iter() {
                    let edge = &mut edges[*e];
                    if edge.done {
                        // TODO actually this seems weird? why would this happen.
                        continue;
                    }
                    edge.done = true;
                    if !seen_vertices.contains_key(&VertexRef(edge.v0)) {
                        next_vertices.push(edge.v0);
                    }
                    else if !seen_vertices.contains_key(&VertexRef(edge.v1)) {
                        next_vertices.push(edge.v1);
                    }
                    // Only add EXACTLY ONE vertex at a time for now -- so, assuming simple
                    // polygons! Figure out the rest, uh, later.
                    break;
                }
            }
        }
        if outline.len() > 0 {
            outlines.push(outline);
        }
    }
    outlines
}
// TODO of course, this doesn't take later movement of sectors into account, dammit
pub fn count_textures(&self) -> HashMap<&str, (usize, f32)> {
let mut counts: HashMap<_, (usize, f32)> = HashMap::new();
// This block exists only so `add` goes out of scope (and stops borrowing counts) before we
// return; I don't know why the compiler cares when `add` clearly doesn't escape
{
let mut add = |tex, area| {
// TODO iirc doom64 or something uses a different empty texture name, "?"
if tex != "-" {
let entry = counts.entry(tex).or_insert((0, 0.0));
entry.0 += 1;
entry.1 += area;
}
};
for line in self.lines.iter() {
let (frontid, backid) = line.side_indices();
if frontid == -1 && backid == -1 {
// No sides; skip
continue;
}
let (v0i, v1i) = line.vertex_indices();
let v0 = &self.vertices[v0i as usize];
let v1 = &self.vertices[v1i as usize];
let dx = (v1.x - v0.x) as f32;
let dy = (v1.y - v0.y) as f32;
let length = (dx * dx + dy * dy).sqrt();
if frontid != -1 && backid != -1 {
// Two-sided line
// TODO checking for the two-sided flag is an interesting map check
// TODO this might be bogus and crash...
// TODO actually that's a good thing to put in a map check
let front_side = &self.sides[frontid as usize];
let back_side = &self.sides[backid as usize];
// TODO sector is an i16?? can it be negative??? indicating no sector?
// (i mean obviously it can be bogus regardless, but can it be deliberately bogus?)
let front_sector = &self.sectors[front_side.sector as usize];
let back_sector = &self.sectors[back_side.sector as usize];
let lowest_ceiling;
let ceiling_diff = front_sector.ceiling_height - back_sector.ceiling_height;
if ceiling_diff > 0 {
let front_upper_height = ceiling_diff as f32;
add(front_side.upper_texture, length * front_upper_height);
lowest_ceiling = back_sector.ceiling_height;
}
else {
let back_upper_height = -ceiling_diff as f32;
add(front_side.upper_texture, length * back_upper_height);
lowest_ceiling = front_sector.ceiling_height;
}
let highest_floor;
let floor_diff = front_sector.floor_height - back_sector.floor_height;
if floor_diff > 0 {
let back_lower_height = floor_diff as f32;
add(back_side.lower_texture, length * back_lower_height);
highest_floor = back_sector.floor_height;
}
else {
let front_lower_height = -floor_diff as f32;
add(front_side.lower_texture, length * front_lower_height);
highest_floor = front_sector.floor_height;
}
let middle_height = (lowest_ceiling - highest_floor) as f32;
// TODO map check for negative height (but this is valid for vavoom-style 3d floors!)
if middle_height > 0.0 {
add(front_side.middle_texture, length * middle_height);
add(back_side.middle_texture, length * middle_height);
}
}
else if backid == -1 {
// Typical one-sided wall
// TODO map check for no two-sided flag
let front_side = &self.sides[frontid as usize];
let front_sector = &self.sectors[front_side.sector as usize];
let middle_height = (front_sector.ceiling_height - front_sector.floor_height) as f32;
add(front_side.middle_texture, length * middle_height);
}
else if frontid == -1 {
// Backwards one-sided wall
// TODO map check for no two-sided flag
// TODO maybe a warning for this case too because it's weird
let back_side = &self.sides[backid as usize];
let back_sector = &self.sectors[back_side.sector as usize];
let middle_height = (back_sector.ceiling_height - back_sector.floor_height) as f32;
add(back_side.middle_texture, length * middle_height);
}
}
}
counts
}
}
/// One entry parsed from a TEXTURE1/TEXTURE2 lump: a composite texture's name
/// and dimensions.  The patch layout that follows each entry is not yet parsed.
pub struct TEXTURExEntry<'a> {
    /// Texture name; at most 8 printable ASCII characters, borrowed from the lump.
    pub name: &'a str,
    pub width: i16,
    pub height: i16,
}
// Parse the TEXTUREx lump header: a texture count followed by that many byte
// offsets (relative to the start of the lump) to the individual entries.
named!(texturex_lump_header<Vec<i32>>, do_parse!(
    numtextures: le_i32 >>
    offsets: many_m_n!(numtextures as usize, numtextures as usize, le_i32) >>
    (offsets)
));
// Parse a single TEXTUREx entry: name and dimensions.  The patch list that
// follows (patchcount entries) is currently skipped.
named!(texturex_lump_entry<TEXTURExEntry>, do_parse!(
    name: apply!(fixed_length_ascii, 8) >>
    le_i32 >> // "masked", unused
    // TODO these should be positive
    width: le_i16 >>
    height: le_i16 >>
    le_i32 >> // "columndirectory", unused
    patchcount: le_i16 >>
    // TODO patches
    (TEXTURExEntry{
        name,
        width,
        height,
    })
));
/// Parse a TEXTURE1/TEXTURE2 lump into its list of texture entries.
///
/// # Errors
/// Fails if the header doesn't parse, if any entry offset is negative or past
/// the end of the lump, or if an individual entry doesn't parse.
pub fn parse_texturex_names(buf: &[u8]) -> Result<Vec<TEXTURExEntry>> {
    let offsets = nom_to_result("TEXTUREx header", buf, texturex_lump_header(buf))?;
    let mut ret = Vec::with_capacity(offsets.len());
    for (i, &offset) in offsets.iter().enumerate() {
        if offset < 0 {
            bail!(ErrorKind::NegativeOffset("TEXTUREx", i, offset as isize));
        }
        let offset = offset as usize;
        // Reject offsets past the end of the lump up front: slicing below
        // would panic, and letting nom see a truncated slice would only
        // produce a confusing Incomplete.
        if offset > buf.len() {
            bail!("TEXTUREx entry {} has out-of-range offset {}", i, offset);
        }
        ret.push(nom_to_result("TEXTUREx", buf, texturex_lump_entry(&buf[offset..]))?);
    }
    Ok(ret)
}
// Exclude DEL char and out-of-range value in `fixed_length_ascii()`
extern crate byteorder;
extern crate euclid;
extern crate typed_arena;
extern crate bit_vec;
#[macro_use]
extern crate nom;
#[macro_use]
extern crate error_chain;
extern crate svg; // TODO temp for debugging
use std::borrow::Cow;
use std::io::Write;
use std::str::FromStr;
use std::str;
use std::u8;
use byteorder::{LittleEndian, WriteBytesExt};
use nom::{IResult, Needed, digit, le_i16, le_i32, le_u16, le_u32, le_u8};
pub mod errors;
use errors::{ErrorKind, Result, nom_to_result};
pub mod geom;
pub mod map;
pub mod universe;
pub mod shapeops;
mod vanilladoom;
// FIXME so, this whole file is kind of a mess. i was trying to make the raw binary data available
// for inspection without needing to parse into a whole map object, and that turns out to be
// complicated? designing a wad browsing api is also kinda hard, since... well.
// i'm not entirely sure what i should do to remedy all of this. a Map is fairly heavy-handed, and
// you can still do plenty of interesting stuff without one.
// TODO Doom 64? PlayStation Doom? others?
/// A source port (or the vanilla engine) whose feature set a map may target.
pub enum SourcePort {
    // TODO distinguish doom 1 and 2, since they have different sets of things? final doom?
    Doom,
    Heretic,
    Hexen,
    Strife,
    Boom,
    // TODO boom plus skybox transfer
    // TODO try to distinguish versions of these...??
    ZDoom,
    GZDoom,
    // I don't know so much about these
    MBF,
    Legacy,
    Eternity,
    Vavoom,
}
/// The base game a mod is built on top of, if any.
pub enum BaseGame {
    None, // i.e. this IS a base game, or is a TC, or whatever
    Doom1, // TODO distinguish from ultimate doom?
    Doom2,
    TNTEvilution,
    Plutonia,
    Heretic,
    Hexen,
    Strife,
}
/// A vanilla-style map marker name: either episode/map ("ExMy") or
/// sequential ("MAPxx").
#[derive(Debug)]
pub enum MapName {
    ExMy(u8, u8),
    MAPxx(u8),
}

impl std::fmt::Display for MapName {
    /// Render the canonical lump name: "E1M8" or zero-padded "MAP07".
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let rendered = match *self {
            MapName::ExMy(episode, map) => format!("E{}M{}", episode, map),
            MapName::MAPxx(number) => format!("MAP{:02}", number),
        };
        f.write_str(&rendered)
    }
}
/// The on-disk format of a map: vanilla Doom binary, Hexen binary (Doom
/// layout plus a BEHAVIOR lump and extended things/linedefs), or textual UDMF.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum MapFormat {
    Doom,
    Hexen,
    UDMF,
}
// Parse an episode/map-style lump name, e.g. "E1M8".
named!(exmy_map_name(&[u8]) -> MapName, do_parse!(
    tag!(b"E") >>
    e: digit >>
    tag!(b"M") >>
    m: digit >>
    eof!() >>
    (MapName::ExMy(
        // We already know e and m are digits, so this is all safe
        u8::from_str(str::from_utf8(e).unwrap()).unwrap(),
        u8::from_str(str::from_utf8(m).unwrap()).unwrap(),
    ))
));
// Parse a sequential-style lump name, e.g. "MAP01".
named!(mapxx_map_name(&[u8]) -> MapName, do_parse!(
    tag!(b"MAP") >>
    xx: digit >>
    eof!() >>
    (MapName::MAPxx(
        // TODO need to enforce that xx is exactly two digits! and also in [1, 32]
        u8::from_str(str::from_utf8(xx).unwrap()).unwrap()
    ))
));
// Recognize any vanilla-style map marker name, in either scheme.
named!(vanilla_map_name(&[u8]) -> MapName, alt!(exmy_map_name | mapxx_map_name));
/// Whether a WAD is a full standalone game (IWAD) or a mod/patch (PWAD).
#[derive(Copy, Clone, Debug)]
pub enum WADType {
    IWAD,
    PWAD,
}
// TODO do... these
// Marker trait for archive formats; no behavior yet.
trait Archive {
    // TODO needs a feature atm -- const supports_duplicate_names: bool;
}
// Marker trait for WAD-specific behavior; no behavior yet.
trait WAD {
}
// TODO this should be able to contain an arbitrarily-typed entry, right? but when does the type
// detection happen?
/// A single named lump and its raw bytes; both may either borrow from the
/// underlying buffer or own their data.
pub struct WADEntry<'a> {
    pub name: Cow<'a, str>,
    pub data: Cow<'a, [u8]>,
}
/// High-level interface to a WAD archive. Does its best to prevent you from producing an invalid
/// WAD. This is probably what you want.
#[allow(dead_code)]
pub struct WADArchive<'a> {
    // TODO it would be nice if we could take ownership of the slice somehow, but i don't know how
    // to do that really. i also don't know how to tell rust that the entry slices are owned by
    // this buffer?
    // Raw bytes of the entire WAD file; entry data borrows from here.
    buffer: &'a [u8],
    /// Type of the WAD, either an IWAD (full standalone game) or PWAD (patch wad, a small mod).
    pub wadtype: WADType,
    // Pairs of (name, data)
    entries: Vec<WADEntry<'a>>,
}
impl<'a> Archive for WADArchive<'a> {
}
// Planned high-level accessors; nothing implemented yet.
impl<'a> WADArchive<'a> {
    // TODO:
    // first_entry(name)
    // iter_entry(name)
    // iter_between(_start, _end)
    // iter_maps()
    // iter_flats()
    // TODO interesting things:
    // - find suspected markers that contain data
}
// TODO:
// - get entry by name (REMEMBER: NAMES CAN REPEAT)
// - detect entry type (using name as a hint as well)
// - how the hell do i iterate over /either/ an entry or a map? maybe that's not even a useful thing to do
// things to check:
// - error: lump outside the bounds of the wad
// - warning: lump overlaps the directory
// - warning: lumps overlap
// - warning: lumps not in the same order physically as in the listing
// - interesting: lumps have gaps
/// Low-level interface to a parsed WAD. This is really only useful for, uh, shenanigans.
/// Low-level interface to a parsed WAD. This is really only useful for, uh, shenanigans.
pub struct BareWAD<'n> {
    /// The entire raw file contents; directory entries describe slices of this.
    pub buffer: &'n [u8],
    pub header: BareWADHeader,
    /// Directory entries in file order.
    pub directory: Vec<BareWADDirectoryEntry<'n>>,
}
// TODO expand these into separate types, probably, so the severity can be an associated value...
// either that or use a method with a big ol match block Ugh
/// Structural problems that `BareWAD::diagnose` may report.
pub enum Diagnostic {
    InvalidLumpBounds(usize, usize),
    LumpOverlapsIndex,
    LumpsOverlap,
    LumpsOutOfOrder,
    UnusedSpace,
}
impl<'n> BareWAD<'n> {
    /// Scan the WAD for structural problems.  Not implemented yet; currently
    /// always returns an empty list.
    pub fn diagnose(&self) -> Vec<Diagnostic> {
        // TODO this
        Vec::new()
    }

    /// Build a high-level `WADArchive` view over the same underlying buffer.
    pub fn to_archive(&self) -> WADArchive {
        let mut entries = Vec::with_capacity(self.directory.len());
        for bare_entry in self.directory.iter() {
            entries.push(WADEntry{
                name: Cow::from(bare_entry.name),
                data: Cow::from(bare_entry.extract_slice(self.buffer)),
            });
        }
        WADArchive{
            buffer: self.buffer,
            wadtype: self.header.identification,
            entries,
        }
    }

    /// Return the raw bytes of the directory entry at `index`.
    pub fn entry_slice(&self, index: usize) -> &[u8] {
        self.directory[index].extract_slice(self.buffer)
    }

    /// Return the raw bytes of the first entry named `name`, if there is one.
    pub fn first_entry(&self, name: &str) -> Option<&[u8]> {
        // TODO what should this do if the offsets are bogus?
        self.directory.iter()
            .find(|entry| entry.name == name)
            .map(|entry| entry.extract_slice(self.buffer))
    }

    /// Iterate over the entries strictly between two marker lumps.
    pub fn iter_entries_between(&self, begin_marker: &'n str, end_marker: &'n str) -> BareWADBetweenIterator {
        BareWADBetweenIterator{
            bare_wad: self,
            entry_iter: self.directory.iter(),
            begin_marker,
            end_marker,
            between_markers: false,
        }
    }

    /// Iterate over the map blocks contained in this WAD.
    pub fn iter_maps(&self) -> WADMapIterator {
        WADMapIterator{ archive: self, entry_iter: self.directory.iter().enumerate().peekable() }
    }
}
/// Iterator over the entries between two marker lumps; produced by
/// `BareWAD::iter_entries_between`.
pub struct BareWADBetweenIterator<'a> {
    bare_wad: &'a BareWAD<'a>,
    entry_iter: std::slice::Iter<'a, BareWADDirectoryEntry<'a>>,
    begin_marker: &'a str,
    end_marker: &'a str,
    // True once the begin marker has been seen and the end marker has not.
    between_markers: bool,
}
impl<'a> Iterator for BareWADBetweenIterator<'a> {
    type Item = WADEntry<'a>;
    /// Yield each entry strictly between the begin and end markers; the
    /// marker lumps themselves are never yielded.
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let entry = match self.entry_iter.next() {
                Some(entry) => entry,
                None => return None,
            };
            if entry.name == self.begin_marker {
                self.between_markers = true;
            }
            else if entry.name == self.end_marker {
                self.between_markers = false;
            }
            else if self.between_markers {
                return Some(WADEntry{
                    name: Cow::from(entry.name),
                    data: Cow::from(entry.extract_slice(self.bare_wad.buffer)),
                });
            }
        }
    }
}
/// The fixed 12-byte header at the start of every WAD file.
pub struct BareWADHeader {
    pub identification: WADType,
    /// Number of directory entries (lumps).
    pub numlumps: u32,
    /// Byte offset of the directory, from the start of the file.
    pub infotableofs: u32,
}
// Recognize the four magic bytes and produce the corresponding WAD type.
named!(iwad_tag<WADType>, value!(WADType::IWAD, tag!(b"IWAD")));
named!(pwad_tag<WADType>, value!(WADType::PWAD, tag!(b"PWAD")));
// Parse the WAD header: magic, lump count, directory offset.
named!(wad_header<BareWADHeader>, do_parse!(
    identification: return_error!(
        nom::ErrorKind::Custom(1),
        alt!(iwad_tag | pwad_tag)) >>
    numlumps: le_u32 >>
    infotableofs: le_u32 >>
    (BareWADHeader{ identification, numlumps, infotableofs })
));
/// One parsed WAD directory entry: where a lump's bytes live and what it's
/// called.
#[derive(Debug)]
pub struct BareWADDirectoryEntry<'name> {
    pub filepos: u32,
    pub size: u32,
    pub name: &'name str,
}

impl<'n> BareWADDirectoryEntry<'n> {
    /// Extract the slice described by this entry from a buffer.
    pub fn extract_slice<'b>(&self, buf: &'b [u8]) -> &'b [u8] {
        let begin = self.filepos as usize;
        &buf[begin..begin + self.size as usize]
    }
}
/// Parse a NUL-padded ASCII name field of exactly `len` bytes.
///
/// Returns the text up to the first NUL (or the whole field if there is
/// none), always consuming `len` bytes of input.  Any byte outside the
/// printable ASCII range 32-126 makes the parse fail.
fn fixed_length_ascii(input: &[u8], len: usize) -> IResult<&[u8], &str> {
    if input.len() < len {
        return IResult::Incomplete(Needed::Size(len));
    }
    for (i, &byte) in input[..len].iter().enumerate() {
        if byte == 0 {
            // NUL padding starts here.  SAFETY: every byte before position
            // `i` was verified printable ASCII, hence valid UTF-8.
            let s = unsafe { str::from_utf8_unchecked(&input[..i]) };
            return IResult::Done(&input[len..], s);
        }
        if byte < 32 || byte > 126 {
            // Totally bogus character
            return IResult::Error(nom::Err::Code(nom::ErrorKind::Custom(0)));
        }
    }
    // SAFETY: all `len` bytes were verified printable ASCII above.
    IResult::Done(&input[len..], unsafe { str::from_utf8_unchecked(&input[..len]) })
}
// Parse one 16-byte directory entry.  NOTE: the dbg_dmp! wrapper dumps the
// input to stderr on parse failure; it looks like a leftover debugging aid.
named!(wad_entry<BareWADDirectoryEntry>, dbg_dmp!(do_parse!(
    filepos: le_u32 >>
    size: le_u32 >>
    name: apply!(fixed_length_ascii, 8) >>
    (BareWADDirectoryEntry{ filepos, size, name })
)));
/// Parse the WAD directory described by `header` out of the whole file buffer.
/// Returns the input remaining after the last parsed entry, plus one entry
/// per lump.
fn wad_directory<'a>(buf: &'a [u8], header: &BareWADHeader) -> IResult<&'a [u8], Vec<BareWADDirectoryEntry<'a>>> {
    let lumpct = header.numlumps as usize;
    let offset = header.infotableofs as usize;
    // TODO can i unhardcode the size of a wad entry here?
    let tablelen = lumpct * 16;
    if buf.len() < offset + tablelen {
        return IResult::Incomplete(Needed::Size(tablelen));
    }
    let mut ret = Vec::with_capacity(lumpct);
    let mut parse_from = &buf[offset..];
    for _ in 0..lumpct {
        // try_parse! returns early with the error if an entry fails to parse.
        let (leftovers, entry) = try_parse!(parse_from, wad_entry);
        ret.push(entry);
        parse_from = leftovers;
    }
    IResult::Done(parse_from, ret)
}
// TODO problems to scan a wad map for:
// - missing a required lump
// TODO curiosities to scan a wad map for:
// - content in ENDMAP
// - UDMF along with the old-style text maps
// FIXME use Result, but, figure out how to get an actual error out of here
// FIXME actually this fairly simple format is a good place to start thinking about how to return errors in general; like, do i want custom errors for tags? etc
/// Parse a WAD's header and directory, borrowing from `buf`.  Lump contents
/// are not touched until they are requested through the returned `BareWAD`.
pub fn parse_wad(buf: &[u8]) -> Result<BareWAD> {
    // FIXME ambiguous whether the error was from parsing the header or the entries
    let header = nom_to_result("wad header", buf, wad_header(buf))?;
    // TODO buf is not actually the right place here
    let entries = nom_to_result("wad index", buf, wad_directory(buf, &header))?;
    Ok(BareWAD{ buffer: buf, header, directory: entries })
}
// -----------------------------------------------------------------------------
// Map stuff
// Standard lumps and whether they're required
// The lumps a vanilla map block may contain, in the order they appear,
// paired with whether each one is mandatory.
const MAP_LUMP_ORDER: [(&str, bool); 11] = [
    ("THINGS", true),
    ("LINEDEFS", true),
    ("SIDEDEFS", true),
    ("VERTEXES", true),
    ("SEGS", false),
    ("SSECTORS", false),
    ("NODES", false),
    ("SECTORS", true),
    ("REJECT", false),
    ("BLOCKMAP", false),
    ("BEHAVIOR", false),
];
/// Iterator over the map blocks in a WAD; produced by `BareWAD::iter_maps`.
#[allow(dead_code)]
pub struct WADMapIterator<'a> {
    archive: &'a BareWAD<'a>,
    // Peekable so we can stop at the next map marker without consuming it.
    entry_iter: std::iter::Peekable<std::iter::Enumerate<std::slice::Iter<'a, BareWADDirectoryEntry<'a>>>>,
}
impl<'a> Iterator for WADMapIterator<'a> {
    type Item = WADMapEntryBlock;
    /// Scan forward for the next map marker lump, then record the directory
    /// indices of the map's component lumps until one stops matching the
    /// expected order in `MAP_LUMP_ORDER`.
    fn next(&mut self) -> Option<WADMapEntryBlock> {
        // Skip entries until one parses as a vanilla map marker name.
        let (marker_index, map_name) = loop {
            if let Some((i, entry)) = self.entry_iter.next() {
                if let IResult::Done(_, found_map_name) = vanilla_map_name(entry.name.as_bytes()) {
                    break (i, found_map_name);
                }
            }
            else {
                // Hit the end of the entries
                return None;
            }
        };
        // Start with an empty block; each lump index is filled in as found.
        let mut range = WADMapEntryBlock{
            format: MapFormat::Doom,
            name: map_name,
            marker_index,
            last_index: marker_index,
            things_index: None,
            linedefs_index: None,
            sidedefs_index: None,
            vertexes_index: None,
            segs_index: None,
            ssectors_index: None,
            nodes_index: None,
            sectors_index: None,
            reject_index: None,
            blockmap_index: None,
            behavior_index: None,
            textmap_index: None,
        };
        // Use peeking here, so that if we stumble onto the next map header, we don't consume it
        let (mut i, mut entry) = match self.entry_iter.peek() {
            Some(&next) => next,
            None => { return None; }
        };
        if entry.name == "TEXTMAP" {
            // This is a UDMF map, which has a completely different scheme: it
            // goes until an explicit ENDMAP marker
            range.format = MapFormat::UDMF;
            // TODO continue this logic
        }
        // Walk the expected lump order, recording each lump that appears.
        for &(lump_name, is_required) in MAP_LUMP_ORDER.iter() {
            // TODO i am pretty sure this is supposed to be case-insensitive?
            if entry.name == lump_name {
                match entry.name {
                    "THINGS" => { range.things_index = Some(i); }
                    "LINEDEFS" => { range.linedefs_index = Some(i); }
                    "SIDEDEFS" => { range.sidedefs_index = Some(i); }
                    "VERTEXES" => { range.vertexes_index = Some(i); }
                    "SEGS" => { range.segs_index = Some(i); }
                    "SSECTORS" => { range.ssectors_index = Some(i); }
                    "NODES" => { range.nodes_index = Some(i); }
                    "SECTORS" => { range.sectors_index = Some(i); }
                    "REJECT" => { range.reject_index = Some(i); }
                    "BLOCKMAP" => { range.blockmap_index = Some(i); }
                    "BEHAVIOR" => {
                        range.behavior_index = Some(i);
                        // The presence of a BEHAVIOR lump is the sole indication of Hexen format
                        range.format = MapFormat::Hexen;
                    }
                    "TEXTMAP" => { range.textmap_index = Some(i); }
                    _ => {
                        // TODO wait, what's the right thing here
                        break;
                    }
                }
                // Consume the matched lump and peek at the next entry without
                // consuming it, in case it belongs to the next map.
                self.entry_iter.next();
                match self.entry_iter.peek() {
                    Some(&(next_i, next_entry)) => {
                        i = next_i;
                        entry = next_entry;
                    }
                    None => {
                        // FIXME this needs to check whether there are any
                        // /required/ lumps not yet seen, ugh
                        break;
                    }
                }
            }
            else if is_required {
                // FIXME return a better error: expected lump X, found Y
                // FIXME should this really stop us from iterating over any further maps?
                // TODO should we try to cleverly detect what happened here? what DID happen here, anyway?
                // TODO maybe we should return what we have so far, and let the conversion to a real map take care of it? but then how do we handle missing only one lump (do we grab the rest)? what about duplicate lumps?
                // TODO same questions go for the places i used try!(), except i think i got the logic even worse there, idk. write some tests
                return None;
            }
        }
        range.last_index = i;
        Some(range)
    }
}
/// The span of directory indices occupied by one map, plus the index of each
/// recognized component lump.
#[derive(Debug)]
pub struct WADMapEntryBlock {
    pub format: MapFormat,
    pub name: MapName, // TODO what are the rules in zdoom? can you really use any map name?
    /// Index of the map's marker lump itself.
    pub marker_index: usize,
    /// Index of the last lump belonging to this map.
    pub last_index: usize,
    pub things_index: Option<usize>,
    pub linedefs_index: Option<usize>,
    pub sidedefs_index: Option<usize>,
    pub vertexes_index: Option<usize>,
    pub segs_index: Option<usize>,
    pub ssectors_index: Option<usize>,
    pub nodes_index: Option<usize>,
    pub sectors_index: Option<usize>,
    pub reject_index: Option<usize>,
    pub blockmap_index: Option<usize>,
    pub behavior_index: Option<usize>,
    pub textmap_index: Option<usize>,
    // TODO endmap
}
// TODO map parsing requires:
// - come up with some way to treat a map as a single unit in a wad (is there anything else that acts this way?)
// - parsers for:
// - THINGS
// - LINEDEFS
// - SIDEDEFS
// - SEGS
// - SSECTORS (deferrable)
// - NODES (deferrable)
// - SECTORS
// - REJECT (deferrable)
// - BLOCKMAP (deferrable)
// - put all this in its own module/hierarchy
named!(hexen_args<[u8; 5]>, count_fixed!(u8, le_u8, 5));
/// A map object ("thing") as stored in a Doom-format THINGS lump.
#[derive(Debug)]
pub struct BareDoomThing {
    pub x: i16,
    pub y: i16,
    // TODO what is this
    pub angle: i16,
    pub doomednum: i16,
    // NOTE: boom added two flags, and mbf one more, so this is a decent signal for targeting those (but not 100%)
    pub flags: u16,
}
impl BareDoomThing {
    /// Serialize this thing in vanilla binary THINGS format (10 bytes,
    /// little-endian), in field order.
    pub fn write_to(&self, writer: &mut Write) -> Result<()> {
        for &word in &[self.x, self.y, self.angle, self.doomednum] {
            writer.write_i16::<LittleEndian>(word)?;
        }
        writer.write_u16::<LittleEndian>(self.flags)?;
        Ok(())
    }
}
// TODO totally different in hexen
// Parse a whole Doom-format THINGS lump: 10-byte records until end of input.
named!(doom_things_lump<Vec<BareDoomThing>>, terminated!(many0!(do_parse!(
    x: le_i16 >>
    y: le_i16 >>
    angle: le_i16 >>
    doomednum: le_i16 >>
    flags: le_u16 >>
    (BareDoomThing{ x, y, angle, doomednum, flags })
)), eof!()));
/// A map object ("thing") as stored in a Hexen-format THINGS lump, which has
/// extra fields (tid, z, special and its args) compared to the Doom format.
#[derive(Debug)]
pub struct BareHexenThing {
    // TODO is this really signed in hexen?
    pub tid: i16,
    pub x: i16,
    pub y: i16,
    pub z: i16,
    // TODO what is this
    pub angle: i16,
    pub doomednum: i16,
    pub flags: u16,
    pub special: u8,
    pub args: [u8; 5],
}
// TODO totally different in hexen
// Parse a whole Hexen-format THINGS lump: 20-byte records until end of input.
named!(hexen_things_lump<Vec<BareHexenThing>>, terminated!(many0!(do_parse!(
    tid: le_i16 >>
    x: le_i16 >>
    y: le_i16 >>
    z: le_i16 >>
    angle: le_i16 >>
    doomednum: le_i16 >>
    flags: le_u16 >>
    special: le_u8 >>
    args: hexen_args >>
    (BareHexenThing{
        tid,
        x,
        y,
        z,
        angle,
        doomednum,
        flags,
        special,
        args,
    })
)), eof!()));
/// Accessors common to Doom- and Hexen-format things, so generic code can
/// read either representation.
pub trait BareBinaryThing {
    fn coords(&self) -> (i16, i16);
    fn doomednum(&self) -> i16;
}
impl BareBinaryThing for BareDoomThing {
    fn coords(&self) -> (i16, i16) {
        (self.x, self.y)
    }
    fn doomednum(&self) -> i16 {
        self.doomednum
    }
}
impl BareBinaryThing for BareHexenThing {
    fn coords(&self) -> (i16, i16) {
        (self.x, self.y)
    }
    fn doomednum(&self) -> i16 {
        self.doomednum
    }
}
// FIXME vertex/sidedef indices are i16 in vanilla, but extended to u16 in most source ports; note that for true vanilla, a negative index makes no sense anyway
// FIXME hexen extends this, which requires detecting hexen format
// FIXME what exactly is the higher-level structure that holds actual references to the sidedefs?
/// A linedef as stored in a Doom-format LINEDEFS lump.
#[derive(Debug)]
pub struct BareDoomLine {
    pub v0: i16,
    pub v1: i16,
    pub flags: i16,
    pub special: i16,
    pub sector_tag: i16,
    // NOTE: -1 to mean none
    pub front_sidedef: i16,
    pub back_sidedef: i16,
}
impl BareDoomLine {
    /// Serialize this line in vanilla binary LINEDEFS format (14 bytes,
    /// little-endian), in field order.
    pub fn write_to(&self, writer: &mut Write) -> Result<()> {
        let fields = [
            self.v0, self.v1, self.flags, self.special, self.sector_tag,
            self.front_sidedef, self.back_sidedef,
        ];
        for &field in fields.iter() {
            writer.write_i16::<LittleEndian>(field)?;
        }
        Ok(())
    }
}
// Parse a whole Doom-format LINEDEFS lump: 14-byte records until end of input.
named!(doom_linedefs_lump<Vec<BareDoomLine>>, terminated!(many0!(do_parse!(
    v0: le_i16 >>
    v1: le_i16 >>
    flags: le_i16 >>
    special: le_i16 >>
    sector_tag: le_i16 >>
    front_sidedef: le_i16 >>
    back_sidedef: le_i16 >>
    (BareDoomLine{ v0, v1, flags, special, sector_tag, front_sidedef, back_sidedef })
)), eof!()));
// TODO source ports extended ids to unsigned here too
/// A linedef as stored in a Hexen-format LINEDEFS lump; unlike the Doom
/// format it carries a byte-sized special with five argument bytes and no
/// sector tag field.
#[derive(Debug)]
pub struct BareHexenLine {
    pub v0: i16,
    pub v1: i16,
    pub flags: i16,
    pub special: u8,
    pub args: [u8; 5],
    // NOTE: -1 to mean none
    pub front_sidedef: i16,
    pub back_sidedef: i16,
}
// Parse a whole Hexen-format LINEDEFS lump: 16-byte records until end of input.
named!(hexen_linedefs_lump<Vec<BareHexenLine>>, terminated!(many0!(do_parse!(
    v0: le_i16 >>
    v1: le_i16 >>
    flags: le_i16 >>
    special: le_u8 >>
    args: hexen_args >>
    front_sidedef: le_i16 >>
    back_sidedef: le_i16 >>
    (BareHexenLine{
        v0,
        v1,
        flags,
        special,
        args,
        front_sidedef,
        back_sidedef,
    })
)), eof!()));
/// Accessors common to Doom- and Hexen-format linedefs, so generic code can
/// read either representation.
pub trait BareBinaryLine {
    fn vertex_indices(&self) -> (i16, i16);
    fn side_indices(&self) -> (i16, i16);
    fn has_special(&self) -> bool;
    fn flags(&self) -> i16;
}
impl BareBinaryLine for BareDoomLine {
    fn vertex_indices(&self) -> (i16, i16) {
        (self.v0, self.v1)
    }
    fn side_indices(&self) -> (i16, i16) {
        (self.front_sidedef, self.back_sidedef)
    }
    fn has_special(&self) -> bool {
        self.special != 0
    }
    fn flags(&self) -> i16 {
        self.flags
    }
}
impl BareBinaryLine for BareHexenLine {
    fn vertex_indices(&self) -> (i16, i16) {
        (self.v0, self.v1)
    }
    fn side_indices(&self) -> (i16, i16) {
        (self.front_sidedef, self.back_sidedef)
    }
    fn has_special(&self) -> bool {
        self.special != 0
    }
    fn flags(&self) -> i16 {
        self.flags
    }
}
/// One side of a line in binary SIDEDEFS format; texture names borrow from
/// the lump buffer.
#[derive(Debug)]
pub struct BareSide<'a> {
    pub x_offset: i16,
    pub y_offset: i16,
    pub upper_texture: &'a str,
    pub lower_texture: &'a str,
    pub middle_texture: &'a str,
    /// Index into SECTORS of the sector this side faces.
    pub sector: i16,
}
impl<'a> BareSide<'a> {
    /// Serialize this side in vanilla binary SIDEDEFS format (30 bytes):
    /// offsets, the three texture names NUL-padded to 8 bytes each, then the
    /// sector index.
    ///
    /// Uses `write_all` rather than `write` so a short write is an error
    /// instead of silently truncating the output.
    pub fn write_to(&self, writer: &mut Write) -> Result<()> {
        // Write a texture name padded with NULs out to 8 bytes.
        fn write_name(writer: &mut Write, name: &str) -> Result<()> {
            writer.write_all(name.as_bytes())?;
            for _ in name.len() .. 8 {
                writer.write_all(&[0])?;
            }
            Ok(())
        }
        writer.write_i16::<LittleEndian>(self.x_offset)?;
        writer.write_i16::<LittleEndian>(self.y_offset)?;
        write_name(writer, self.upper_texture)?;
        write_name(writer, self.lower_texture)?;
        write_name(writer, self.middle_texture)?;
        writer.write_i16::<LittleEndian>(self.sector)?;
        Ok(())
    }
}
// FIXME using many0! followed by eof! means that if the parse fails, many0! thinks that's a
// success, stops, and then hits the eof! and fails, which loses the original error and is really
// confusing
// TODO file some tickets on nom:
// - docs for call! are actually for apply!
// - many_till! with eof! gives a bizarre error about being unable to infer a type for E
// - error management guide seems to be pre-2.0; mentions importing from nom::util, which is
// private, and makes no mention of verbose vs simple errors at all
// - also, even with verbose errors, error handling kinda sucks? i'm not even sure why this is
// an enum when it gives me completely useless alternations, some of which (ManyTill) are
// thrown in multiple places
// - seems impossible to use a different error type due to rust's not very good inference rules
// - many_till throws away the underlying error.
// An eof! wrapper with its result pinned to the default IResult type, since
// a bare eof! inside many_till! trips type inference on the error parameter.
macro_rules! typed_eof (
    ($i:expr,) => (
        {
            let res: IResult<_, _> = eof!($i,);
            res
        }
    );
);
// Parse a whole SIDEDEFS lump: 30-byte records until end of input.
// many_till! + typed_eof! is used instead of many0! + eof! so parse errors
// inside a record aren't swallowed.
named!(sidedefs_lump<Vec<BareSide>>, map!(many_till!(do_parse!(
    x_offset: le_i16 >>
    y_offset: le_i16 >>
    upper_texture: apply!(fixed_length_ascii, 8) >>
    lower_texture: apply!(fixed_length_ascii, 8) >>
    middle_texture: apply!(fixed_length_ascii, 8) >>
    sector: le_i16 >>
    (BareSide{
        x_offset,
        y_offset,
        upper_texture,
        lower_texture,
        middle_texture,
        sector
    })
), typed_eof!()), |(r, _)| r));
// FIXME: vertices are i16 for vanilla, 15/16 fixed for ps/n64, effectively infinite but really f32 for udmf
/// A map vertex in vanilla integer coordinates.
#[derive(Debug)]
pub struct BareVertex {
    pub x: i16,
    pub y: i16,
}
impl BareVertex {
    /// Serialize in vanilla binary VERTEXES format (4 bytes, little-endian).
    pub fn write_to(&self, writer: &mut Write) -> Result<()> {
        writer.write_i16::<LittleEndian>(self.x)?;
        writer.write_i16::<LittleEndian>(self.y)?;
        Ok(())
    }
}
// Parse a whole VERTEXES lump: 4-byte coordinate pairs until end of input.
named!(vertexes_lump<Vec<BareVertex>>, terminated!(many0!(do_parse!(
    x: le_i16 >>
    y: le_i16 >>
    (BareVertex{ x, y })
)), eof!()));
/// A sector in binary SECTORS format; texture names borrow from the lump
/// buffer.
#[derive(Debug)]
pub struct BareSector<'a> {
    pub floor_height: i16,
    pub ceiling_height: i16,
    pub floor_texture: &'a str,
    pub ceiling_texture: &'a str,
    pub light: i16, // XXX what?? light can only go up to 255!
    pub sector_type: i16, // TODO check if these are actually signed or what
    pub sector_tag: i16,
}
impl<'a> BareSector<'a> {
    /// Serialize this sector in vanilla binary SECTORS format (26 bytes):
    /// heights, the two flat names NUL-padded to 8 bytes each, then light,
    /// type, and tag.
    ///
    /// Uses `write_all` rather than `write` so a short write is an error
    /// instead of silently truncating the output.
    pub fn write_to(&self, writer: &mut Write) -> Result<()> {
        // Write a flat name padded with NULs out to 8 bytes.
        fn write_name(writer: &mut Write, name: &str) -> Result<()> {
            writer.write_all(name.as_bytes())?;
            for _ in name.len() .. 8 {
                writer.write_all(&[0])?;
            }
            Ok(())
        }
        writer.write_i16::<LittleEndian>(self.floor_height)?;
        writer.write_i16::<LittleEndian>(self.ceiling_height)?;
        write_name(writer, self.floor_texture)?;
        write_name(writer, self.ceiling_texture)?;
        writer.write_i16::<LittleEndian>(self.light)?;
        writer.write_i16::<LittleEndian>(self.sector_type)?;
        writer.write_i16::<LittleEndian>(self.sector_tag)?;
        Ok(())
    }
}
// Parse a whole SECTORS lump: 26-byte records until end of input.
named!(sectors_lump<Vec<BareSector>>, terminated!(many0!(do_parse!(
    floor_height: le_i16 >>
    ceiling_height: le_i16 >>
    floor_texture: apply!(fixed_length_ascii, 8) >>
    ceiling_texture: apply!(fixed_length_ascii, 8) >>
    light: le_i16 >>
    sector_type: le_i16 >>
    sector_tag: le_i16 >>
    (BareSector{
        floor_height,
        ceiling_height,
        floor_texture,
        ceiling_texture,
        light,
        sector_type,
        sector_tag,
    })
)), eof!()));
/// A parsed binary map, generic over the Doom vs Hexen line and thing formats.
#[derive(Debug)]
pub struct BareBinaryMap<'a, L: BareBinaryLine, T: BareBinaryThing> {
    pub vertices: Vec<BareVertex>,
    pub sectors: Vec<BareSector<'a>>,
    pub sides: Vec<BareSide<'a>>,
    pub lines: Vec<L>,
    pub things: Vec<T>,
}
/// The result of parsing a Doom-format map definition. The contained
/// structures have not been changed in any way. Everything is public, and
/// nothing is preventing you from meddling with the contained data in a way
/// that might make it invalid.
pub type BareDoomMap<'a> = BareBinaryMap<'a, BareDoomLine, BareDoomThing>;
/// The result of parsing a Hexen-format map definition. The contained
/// structures have not been changed in any way. Everything is public, and
/// nothing is preventing you from meddling with the contained data in a way
/// that might make it invalid.
pub type BareHexenMap<'a> = BareBinaryMap<'a, BareHexenLine, BareHexenThing>;
/// Either flavor of parsed binary map.
#[derive(Debug)]
pub enum BareMap<'a> {
    Doom(BareDoomMap<'a>),
    Hexen(BareHexenMap<'a>),
}
// TODO much more error handling wow lol
/// Parse a binary map from the lumps listed in `range`, borrowing from
/// `archive`'s buffer.  Despite the name, this handles both Doom- and
/// Hexen-format binary maps (chosen by `range.format`); it does not handle
/// UDMF.
pub fn parse_doom_map<'a>(archive: &'a BareWAD, range: &WADMapEntryBlock) -> Result<BareMap<'a>> {
    // TODO the map being parsed doesn't appear in the returned error... sigh
    let vertexes_index = range.vertexes_index.ok_or(ErrorKind::MissingMapLump("VERTEXES"))?;
    let buf = archive.entry_slice(vertexes_index);
    let vertices = nom_to_result("VERTEXES lump", buf, vertexes_lump(buf))?;
    let sectors_index = range.sectors_index.ok_or(ErrorKind::MissingMapLump("SECTORS"))?;
    let buf = archive.entry_slice(sectors_index);
    let sectors = nom_to_result("SECTORS lump", buf, sectors_lump(buf))?;
    let sidedefs_index = range.sidedefs_index.ok_or(ErrorKind::MissingMapLump("SIDEDEFS"))?;
    let buf = archive.entry_slice(sidedefs_index);
    let sides = nom_to_result("SIDEDEFS lump", buf, sidedefs_lump(buf))?;
    if range.format == MapFormat::Doom {
        let linedefs_index = range.linedefs_index.ok_or(ErrorKind::MissingMapLump("LINEDEFS"))?;
        let buf = archive.entry_slice(linedefs_index);
        let lines = nom_to_result("LINEDEFS lump", buf, doom_linedefs_lump(buf))?;
        let things_index = range.things_index.ok_or(ErrorKind::MissingMapLump("THINGS"))?;
        let buf = archive.entry_slice(things_index);
        let things = nom_to_result("THINGS lump", buf, doom_things_lump(buf))?;
        Ok(BareMap::Doom(BareDoomMap{
            vertices,
            sectors,
            sides,
            lines,
            things,
        }))
    }
    else {
        // Hexen format: same vertex/sector/side lumps, but the linedefs and
        // things have their own layouts.
        let linedefs_index = range.linedefs_index.ok_or(ErrorKind::MissingMapLump("LINEDEFS"))?;
        let buf = archive.entry_slice(linedefs_index);
        let lines = nom_to_result("LINEDEFS lump", buf, hexen_linedefs_lump(buf))?;
        let things_index = range.things_index.ok_or(ErrorKind::MissingMapLump("THINGS"))?;
        let buf = archive.entry_slice(things_index);
        let things = nom_to_result("THINGS lump", buf, hexen_things_lump(buf))?;
        Ok(BareMap::Hexen(BareHexenMap{
            vertices,
            sectors,
            sides,
            lines,
            things,
        }))
    }
}
/// Which side of a line an edge belongs to (front or back sidedef).
#[derive(Copy, Clone, Debug)]
pub enum Facing {
    Front,
    Back,
}
use std::collections::HashMap;
// TODO ok so this is mildly clever but won't work once we get to UDMF champ
impl<'a, L: BareBinaryLine, T: BareBinaryThing> BareBinaryMap<'a, L, T> {
    // TODO this is a horrible fucking mess. but it's a /contained/ horrible fucking mess, so.
    /// Collect the outline(s) of sector `s` as lists of vertex references, by
    /// walking the edges whose sides reference that sector.  Only simple
    /// polygons are handled correctly for now; see the TODOs below.
    pub fn sector_to_polygons(&self, s: usize) -> Vec<Vec<&BareVertex>> {
        // One side of a line bordering sector `s`.
        struct Edge<'a, L: 'a> {
            line: &'a L,
            side: &'a BareSide<'a>,
            facing: Facing,
            v0: &'a BareVertex,
            v1: &'a BareVertex,
            // Set once the outline walk below has consumed this edge.
            done: bool,
        }
        // This is just to convince HashMap to hash on the actual reference, not the underlying
        // BareVertex value
        struct VertexRef<'a>(&'a BareVertex);
        impl<'a> PartialEq for VertexRef<'a> {
            fn eq(&self, other: &VertexRef) -> bool {
                return (self.0 as *const _) == (other.0 as *const _);
            }
        }
        impl<'a> Eq for VertexRef<'a> {}
        impl<'a> std::hash::Hash for VertexRef<'a> {
            fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
                (self.0 as *const BareVertex).hash(state)
            }
        }
        let mut edges = vec![];
        let mut vertices_to_edges = HashMap::new();
        // TODO linear scan -- would make more sense to turn the entire map into polygons in one go
        for line in self.lines.iter() {
            let (frontid, backid) = line.side_indices();
            // FIXME need to do this better
            // Lines with the same sector on both sides don't contribute to
            // the sector's outline; skip them.
            if frontid != -1 && backid != -1 && self.sides[frontid as usize].sector == self.sides[backid as usize].sector {
                continue;
            }
            for &(facing, sideid) in [(Facing::Front, frontid), (Facing::Back, backid)].iter() {
                if sideid == -1 {
                    continue;
                }
                // TODO this and the vertices lookups might be bogus and crash...
                let side = &self.sides[sideid as usize];
                if side.sector as usize == s {
                    let (v0, v1) = line.vertex_indices();
                    let edge = Edge{
                        line,
                        side,
                        facing,
                        // TODO should these be swapped depending on the line facing?
                        v0: &self.vertices[v0 as usize],
                        v1: &self.vertices[v1 as usize],
                        done: false,
                    };
                    edges.push(edge);
                    // Index the edge under both of its endpoints for the walk below.
                    vertices_to_edges.entry(VertexRef(&self.vertices[v0 as usize])).or_insert(Vec::new()).push(edges.len() - 1);
                    vertices_to_edges.entry(VertexRef(&self.vertices[v1 as usize])).or_insert(Vec::new()).push(edges.len() - 1);
                }
            }
        }
        // Trace sectors by starting at the first side's first vertex and attempting to walk from
        // there
        let mut outlines = Vec::new();
        let mut seen_vertices = HashMap::new();
        while edges.len() > 0 {
            // Seed the next outline with any vertex not visited yet.
            let mut next_vertices = vec![];
            for edge in edges.iter() {
                // TODO having done-ness for both edges and vertices seems weird, idk
                if !seen_vertices.contains_key(&VertexRef(edge.v0)) {
                    next_vertices.push(edge.v0);
                    break;
                }
                if !seen_vertices.contains_key(&VertexRef(edge.v1)) {
                    next_vertices.push(edge.v1);
                    break;
                }
            }
            if next_vertices.is_empty() {
                break;
            }
            let mut outline = Vec::new();
            // Walk vertex to vertex along unconsumed edges, collecting the outline.
            while next_vertices.len() > 0 {
                let vertices = next_vertices;
                next_vertices = Vec::new();
                for vertex in vertices.iter() {
                    if seen_vertices.contains_key(&VertexRef(vertex)) {
                        continue;
                    }
                    seen_vertices.insert(VertexRef(vertex), true);
                    outline.push(*vertex);
                    // TODO so, problems occur here if:
                    // - a vertex has more than two edges
                    //   - special case: double-sided edges are OK! but we have to eliminate
                    //     those, WITHOUT ruining entirely self-referencing sectors
                    // - a vertex has one edge
                    for e in vertices_to_edges.get(&VertexRef(vertex)).unwrap().iter() {
                        let edge = &mut edges[*e];
                        if edge.done {
                            // TODO actually this seems weird? why would this happen.
                            continue;
                        }
                        edge.done = true;
                        if !seen_vertices.contains_key(&VertexRef(edge.v0)) {
                            next_vertices.push(edge.v0);
                        }
                        else if !seen_vertices.contains_key(&VertexRef(edge.v1)) {
                            next_vertices.push(edge.v1);
                        }
                        // Only add EXACTLY ONE vertex at a time for now -- so, assuming simple
                        // polygons! Figure out the rest, uh, later.
                        break;
                    }
                }
            }
            if outline.len() > 0 {
                outlines.push(outline);
            }
        }
        outlines
    }
// TODO of course, this doesn't take later movement of sectors into account, dammit
pub fn count_textures(&self) -> HashMap<&str, (usize, f32)> {
let mut counts: HashMap<_, (usize, f32)> = HashMap::new();
// This block exists only so `add` goes out of scope (and stops borrowing counts) before we
// return; I don't know why the compiler cares when `add` clearly doesn't escape
{
let mut add = |tex, area| {
// TODO iirc doom64 or something uses a different empty texture name, "?"
if tex != "-" {
let entry = counts.entry(tex).or_insert((0, 0.0));
entry.0 += 1;
entry.1 += area;
}
};
for line in self.lines.iter() {
let (frontid, backid) = line.side_indices();
if frontid == -1 && backid == -1 {
// No sides; skip
continue;
}
let (v0i, v1i) = line.vertex_indices();
let v0 = &self.vertices[v0i as usize];
let v1 = &self.vertices[v1i as usize];
let dx = (v1.x - v0.x) as f32;
let dy = (v1.y - v0.y) as f32;
let length = (dx * dx + dy * dy).sqrt();
if frontid != -1 && backid != -1 {
// Two-sided line
// TODO checking for the two-sided flag is an interesting map check
// TODO this might be bogus and crash...
// TODO actually that's a good thing to put in a map check
let front_side = &self.sides[frontid as usize];
let back_side = &self.sides[backid as usize];
// TODO sector is an i16?? can it be negative??? indicating no sector?
// (i mean obviously it can be bogus regardless, but can it be deliberately bogus?)
let front_sector = &self.sectors[front_side.sector as usize];
let back_sector = &self.sectors[back_side.sector as usize];
let lowest_ceiling;
let ceiling_diff = front_sector.ceiling_height - back_sector.ceiling_height;
if ceiling_diff > 0 {
let front_upper_height = ceiling_diff as f32;
add(front_side.upper_texture, length * front_upper_height);
lowest_ceiling = back_sector.ceiling_height;
}
else {
let back_upper_height = -ceiling_diff as f32;
add(front_side.upper_texture, length * back_upper_height);
lowest_ceiling = front_sector.ceiling_height;
}
let highest_floor;
let floor_diff = front_sector.floor_height - back_sector.floor_height;
if floor_diff > 0 {
let back_lower_height = floor_diff as f32;
add(back_side.lower_texture, length * back_lower_height);
highest_floor = back_sector.floor_height;
}
else {
let front_lower_height = -floor_diff as f32;
add(front_side.lower_texture, length * front_lower_height);
highest_floor = front_sector.floor_height;
}
let middle_height = (lowest_ceiling - highest_floor) as f32;
// TODO map check for negative height (but this is valid for vavoom-style 3d floors!)
if middle_height > 0.0 {
add(front_side.middle_texture, length * middle_height);
add(back_side.middle_texture, length * middle_height);
}
}
else if backid == -1 {
// Typical one-sided wall
// TODO map check for no two-sided flag
let front_side = &self.sides[frontid as usize];
let front_sector = &self.sectors[front_side.sector as usize];
let middle_height = (front_sector.ceiling_height - front_sector.floor_height) as f32;
add(front_side.middle_texture, length * middle_height);
}
else if frontid == -1 {
// Backwards one-sided wall
// TODO map check for no two-sided flag
// TODO maybe a warning for this case too because it's weird
let back_side = &self.sides[backid as usize];
let back_sector = &self.sectors[back_side.sector as usize];
let middle_height = (back_sector.ceiling_height - back_sector.floor_height) as f32;
add(back_side.middle_texture, length * middle_height);
}
}
}
counts
}
}
/// One texture definition parsed from a TEXTURE1/TEXTURE2 lump.
pub struct TEXTURExEntry<'a> {
    // Texture name: up to 8 ASCII characters, borrowed from the lump buffer
    pub name: &'a str,
    // Width of the composed texture in pixels; should be positive (see TODO in parser)
    pub width: i16,
    // Height of the composed texture in pixels; should be positive
    pub height: i16,
}
// Parses the TEXTUREx lump header: a little-endian i32 entry count, followed by
// exactly that many i32 byte offsets (relative to the start of the lump) where
// the individual texture entries live.
named!(texturex_lump_header<Vec<i32>>, do_parse!(
    numtextures: le_i32 >>
    offsets: many_m_n!(numtextures as usize, numtextures as usize, le_i32) >>
    (offsets)
));
// Parses a single TEXTUREx entry at its offset: 8-byte padded name, then the
// texture dimensions.  The trailing patch list (patchcount patches) is parsed
// up to the count but not yet consumed -- only name/width/height are kept.
named!(texturex_lump_entry<TEXTURExEntry>, do_parse!(
    name: apply!(fixed_length_ascii, 8) >>
    le_i32 >> // "masked", unused
    // TODO these should be positive
    width: le_i16 >>
    height: le_i16 >>
    le_i32 >> // "columndirectory", unused
    patchcount: le_i16 >>
    // TODO patches
    (TEXTURExEntry{
        name,
        width,
        height,
    })
));
/// Parse the name and size information for every texture in a
/// TEXTURE1/TEXTURE2 lump.
///
/// # Errors
/// Fails if the header cannot be parsed, if any entry offset is negative or
/// points at/past the end of the lump, or if an entry itself is malformed.
pub fn parse_texturex_names(buf: &[u8]) -> Result<Vec<TEXTURExEntry>> {
    let offsets = nom_to_result("TEXTUREx header", buf, texturex_lump_header(buf))?;
    let mut ret = Vec::with_capacity(offsets.len());
    for (i, &offset) in offsets.iter().enumerate() {
        if offset < 0 {
            bail!(ErrorKind::NegativeOffset("TEXTUREx", i, offset as isize));
        }
        // Reject offsets beyond the lump up front so the slice below can never panic
        if offset as usize >= buf.len() {
            bail!("TEXTUREx entry {} has out-of-range offset {}", i, offset);
        }
        ret.push(nom_to_result("TEXTUREx", buf, texturex_lump_entry(&buf[(offset as usize)..]))?);
    }
    Ok(ret)
}
|
//! _**tini** is a **t**iny **ini**-file parsing library_
//!
//! This small library provides basic functions to operate with ini-files.
//!
//! Features:
//!
//! * no dependencies;
//! * parsing [from file](Ini::from_file), [from reader](Ini::from_reader) and [from buffer](Ini::from_buffer);
//! * [convert parsed value to given type](Ini::get);
//! * [parse comma-separated lists to vectors](Ini::get_vec);
//! * construct new ini-structure with [method chaining](Ini::item);
//! * writing [to file](Ini::to_file), [to writer](Ini::to_writer) and [to buffer](Ini::to_buffer).
//!
//! # Examples
//! ## Read from buffer and get string values
//! ````
//! # use tini::Ini;
//! let conf = Ini::from_buffer(["[search]",
//! "g = google.com",
//! "dd = duckduckgo.com"].join("\n")).unwrap();
//!
//! let g: String = conf.get("search", "g").unwrap();
//! let dd: String = conf.get("search", "dd").unwrap();
//!
//! assert_eq!(g, "google.com");
//! assert_eq!(dd, "duckduckgo.com");
//! ````
//! ## Construct in program and get vectors
//! ````
//! # use tini::Ini;
//! let conf = Ini::new().section("floats")
//! .item_vec("consts", &[3.1416, 2.7183])
//! .section("integers")
//! .item_vec("lost", &[4, 8, 15, 16, 23, 42]);
//!
//! let consts: Vec<f64> = conf.get_vec("floats", "consts").unwrap();
//! let lost: Vec<i32> = conf.get_vec("integers", "lost").unwrap();
//!
//! assert_eq!(consts, [3.1416, 2.7183]);
//! assert_eq!(lost, [4, 8, 15, 16, 23, 42]);
//! ````
pub mod error;
mod ordered_hashmap;
mod parser;
use error::Error;
use ordered_hashmap::OrderedHashMap;
use parser::{parse_line, Parsed};
use std::fmt;
use std::fs::File;
use std::hash::Hash;
use std::io::{self, BufReader, BufWriter, Read, Write};
use std::iter::Iterator;
use std::path::Path;
use std::str::FromStr;
/// Structure for INI-file data
#[derive(Debug)]
pub struct Ini {
    #[doc(hidden)]
    // Sections in insertion order; each maps keys to raw (unparsed) string values
    document: OrderedHashMap<String, Section>,
    // Section targeted by subsequent chained item()/items()/clear()/remove() calls
    last_section_name: String,
    // Shared empty section backing section_iter() for unknown section names
    empty_section: Section,
}
impl Ini {
    /// Create an empty Ini (similar to [Ini::default])
    ///
    /// The document contains no sections until `section()`/`item()` add them.
    pub fn new() -> Ini {
        Ini { document: OrderedHashMap::new(), last_section_name: String::new(), empty_section: Section::new() }
    }
    /// Private constructor which creates an [Ini] struct from the input string,
    /// parsing it line by line.  Returns the parser's [Error] (carrying the
    /// offending line number) for the first malformed line.
    fn from_string(string: &str) -> Result<Ini, Error> {
        let mut result = Ini::new();
        for (index, line) in string.lines().enumerate() {
            match parse_line(&line, index)? {
                Parsed::Section(name) => result = result.section(name),
                Parsed::Value(name, value) => result = result.item(name, value),
                // Empty lines and comments parse to other variants and are skipped
                _ => (),
            };
        }
        Ok(result)
    }
/// Construct Ini from file
///
/// # Errors
/// This function will return an [Error] if file cannot be opened or parsed
///
/// # Examples
/// You may use [Path]
///
/// ```
/// # use std::path::Path;
/// # use tini::Ini;
/// let path = Path::new("./examples/example.ini");
///
/// let conf = Ini::from_file(path);
///
/// assert!(conf.ok().is_some());
/// ```
///
/// or `&str`
///
/// ```
/// # use tini::Ini;
/// let conf = Ini::from_file("./examples/example.ini");
///
/// assert!(conf.ok().is_some());
/// ```
pub fn from_file<S: AsRef<Path> + ?Sized>(path: &S) -> Result<Ini, Error> {
let file = File::open(path)?;
let mut reader = BufReader::new(file);
Ini::from_reader(&mut reader)
}
/// Construct Ini from any struct who implement [Read](std::io::Read) trait
///
/// # Errors
/// This function will return an [Error] if reader cannot be read or parsed
///
/// # Example
///
/// ```
/// # use std::io::BufReader;
/// # use std::fs::File;
/// # use tini::Ini;
/// let f = File::open("./examples/example.ini").unwrap();
/// let mut reader = BufReader::new(f);
///
/// let conf = Ini::from_reader(&mut reader);
///
/// assert!(conf.ok().is_some());
/// ```
pub fn from_reader<R: Read>(reader: &mut R) -> Result<Ini, Error> {
let mut buffer = String::new();
reader.read_to_string(&mut buffer)?;
Ini::from_string(&buffer)
}
/// Construct Ini from buffer which can be [Into]ed to String
///
/// # Errors
/// This function will return an [Error] if buffer cannot be parsed
///
/// # Example
/// ```
/// # use tini::Ini;
/// let conf = Ini::from_buffer("[section]\none = 1").unwrap();
///
/// let value: Option<u8> = conf.get("section", "one");
/// assert_eq!(value, Some(1));
/// ```
pub fn from_buffer<S: Into<String>>(buf: S) -> Result<Ini, Error> {
Ini::from_string(&buf.into())
}
/// Write Ini to file. This function is similar to [from_file](Ini::from_file) in use.
///
/// # Errors
/// Errors returned by [File::create] and [Write::write_all]
pub fn to_file<S: AsRef<Path> + ?Sized>(&self, path: &S) -> Result<(), io::Error> {
let file = File::create(path)?;
let mut writer = BufWriter::new(file);
self.to_writer(&mut writer)
}
/// Write [Ini] to any struct who implement [Write] trait.
///
/// # Errors
/// Errors returned by [Write::write_all](Write::write_all)
///
/// # Example
/// ```
/// # use tini::Ini;
/// let conf = Ini::default().section("a").item("a", 1);
///
/// // create output Vec<u8> buffer
/// let mut output = Vec::new();
/// // let's write data to Vec<u8>
/// conf.to_writer(&mut output);
///
/// // cast Vec<u8> to utf-8 string
/// let casted_result = String::from_utf8(output).unwrap();
/// assert_eq!(casted_result, "[a]\na = 1")
/// ```
pub fn to_writer<W: Write>(&self, writer: &mut W) -> Result<(), io::Error> {
writer.write_all(self.to_buffer().as_bytes())?;
Ok(())
}
    /// Write [Ini] to buffer (identical to `to_string()`; both go through the
    /// [Display](fmt::Display) implementation)
    ///
    /// # Example
    /// ```
    /// # use tini::Ini;
    /// let conf = Ini::from_buffer("[section]\none = 1").unwrap();
    ///
    /// // you may use `conf.to_buffer()`
    /// let value: String = conf.to_buffer();
    ///
    /// // or conf.to_string();
    /// let value = conf.to_string();
    ///
    /// // or format!("{}", conf);
    /// let value: String = format!("{}", conf);
    ///
    /// // but the result will be the same
    /// assert_eq!(value, "[section]\none = 1");
    /// ```
    pub fn to_buffer(&self) -> String {
        self.to_string()
    }
    /// Set section name for the following methods in chain ([`item()`](Ini::item), [`items()`](Ini::items), etc.)
    ///
    /// # Warning
    /// This function doesn't create a section by itself; the section only
    /// comes into existence once an item is inserted into it.
    ///
    /// # Example
    /// ```
    /// # use tini::Ini;
    /// let mut conf = Ini::new().section("empty");
    /// assert_eq!(conf.to_buffer(), "");
    ///
    /// // but section will be created on item() call
    /// conf = conf.section("one").item("a", 1);
    /// assert_eq!(conf.to_buffer(), "[one]\na = 1");
    /// ```
    pub fn section<S: Into<String>>(mut self, name: S) -> Self {
        self.last_section_name = name.into();
        self
    }
/// Add key-value pair to last section.
///
/// - `name` must support [Into] to [String]
/// - `value` must support [Display](fmt::Display) to support conversion to [String]
///
/// # Example
/// ```
/// # use tini::Ini;
/// let conf = Ini::new().section("test")
/// .item("value", 10);
///
/// assert_eq!(conf.to_buffer(), "[test]\nvalue = 10");
/// ```
pub fn item<N, V>(mut self, name: N, value: V) -> Self
where
N: Into<String>,
V: fmt::Display,
{
self.document
.entry(self.last_section_name.clone())
.or_insert_with(Section::new)
.insert(name.into(), value.to_string());
self
}
/// Add key-vector pair to last section separated by `sep` string.
///
/// - `name` must support [Into] to [String]
/// - `vector` elements must support [Display](fmt::Display) to support conversion to [String]
/// - `sep` arbitrary string delimiter
///
/// # Example
/// ```
/// # use tini::Ini;
/// let conf = Ini::new()
/// .section("default")
/// // add a vector with `,` separator: 1,2,3,4
/// .item_vec_with_sep("a", &[1, 2, 3, 4], ",")
/// // add a vector with `|` separator: a|b|c
/// .item_vec_with_sep("b", &vec!["a", "b", "c"], "|");
///
/// let va: Option<Vec<u8>> = conf.get_vec("default", "a");
/// let vb: Vec<String> = conf.get_vec_with_sep("default", "b", "|").unwrap();
///
/// assert_eq!(va, Some(vec![1, 2, 3, 4]));
/// assert_eq!(vb, ["a", "b", "c"]);
/// ```
pub fn item_vec_with_sep<S, V>(mut self, name: S, vector: &[V], sep: &str) -> Self
where
S: Into<String>,
V: fmt::Display,
{
let vector_data = vector.iter().map(|v| format!("{}", v)).collect::<Vec<_>>().join(sep);
self.document
.entry(self.last_section_name.clone())
.or_insert_with(Section::new)
.insert(name.into(), vector_data);
self
}
    /// Add key-vector pair to last section, using the default `", "` separator.
    ///
    /// - `name` must support [Into] to [String]
    /// - `vector` elements must support [Display](fmt::Display) to support conversion to [String]
    ///
    /// # Example
    /// ```
    /// # use tini::Ini;
    /// let conf = Ini::new()
    ///                .section("default")
    ///                // add vector with default separator `, `
    ///                .item_vec("a", &[1, 2, 3, 4])
    ///                // and another vector
    ///                .item_vec("b", &vec!["a", "b", "c"]);
    ///
    /// let va: Option<Vec<u8>> = conf.get_vec("default", "a");
    /// let vb: Vec<String> = conf.get_vec("default", "b").unwrap();
    ///
    /// assert_eq!(va, Some(vec![1, 2, 3, 4]));
    /// assert_eq!(vb, ["a", "b", "c"]);
    /// ```
    pub fn item_vec<S, V>(self, name: S, vector: &[V]) -> Self
    where
        S: Into<String>,
        V: fmt::Display,
    {
        self.item_vec_with_sep(name, vector, ", ")
    }
/// Insert items from any object supporting [IntoIterator] into last section.
///
/// # Example
/// ```
/// # use tini::Ini;
/// use std::collections::HashMap;
///
/// let mut conf = Ini::new()
/// .section("colors")
/// .items(vec![("black", "#000000"),
/// ("white", "#ffffff")]);
///
/// // create custom section
/// let mut numbers = HashMap::new();
/// numbers.insert("round_pi", 3);
/// // and add to `conf`
/// conf = conf.section("numbers").items(numbers);
///
/// assert_eq!(conf.to_buffer(), [
/// "[colors]",
/// "black = #000000",
/// "white = #ffffff",
/// "",
/// "[numbers]",
/// "round_pi = 3"
/// ].join("\n"));
/// ```
pub fn items<K, V, I>(mut self, items: I) -> Self
where
K: fmt::Display + Eq + Hash,
V: fmt::Display,
I: IntoIterator<Item = (K, V)>,
{
for (k, v) in items {
self = self.item(k.to_string(), v.to_string());
}
self
}
    /// Remove from [Ini] the section last selected by [`section()`](Ini::section).
    /// Does nothing if that section doesn't exist.
    ///
    /// # Example
    /// ```
    /// # use tini::Ini;
    /// let mut config = Ini::from_buffer([
    ///     "[one]",
    ///     "a = 1",
    ///     "[two]",
    ///     "b = 2"
    /// ].join("\n")).unwrap();
    ///
    /// config = config.section("one").clear();
    ///
    /// assert_eq!(config.to_buffer(), "[two]\nb = 2");
    /// ```
    pub fn clear(mut self) -> Self {
        self.document.remove(&self.last_section_name);
        self
    }
/// Remove item from section.
///
/// # Example
/// ```
/// # use tini::Ini;
/// let mut config = Ini::from_buffer([
/// "[one]",
/// "a = 1",
/// "b = 2"
/// ].join("\n")).unwrap();
///
/// config = config.section("one").remove("b");
///
/// assert_eq!(config.to_buffer(), "[one]\na = 1");
/// ```
pub fn remove<K: Into<String>>(mut self, key: K) -> Self {
let key = key.into();
if let Some(sec) = self.document.get_mut(&self.last_section_name) {
sec.remove(&key);
}
self
}
    /// Private method which gets the raw (unparsed) string value of `key`
    /// from `section`, if both exist
    fn get_raw(&self, section: &str, key: &str) -> Option<&String> {
        self.document.get(section).and_then(|x| x.get(key))
    }
/// Get scalar value of key in section.
///
/// - output type `T` must implement [FromStr] trait for auto conversion
///
/// # Example
/// ```
/// # use tini::Ini;
/// let conf = Ini::from_buffer("[section]\none = 1").unwrap();
///
/// let value: Option<u8> = conf.get("section", "one");
///
/// assert_eq!(value, Some(1));
/// ```
pub fn get<T: FromStr>(&self, section: &str, key: &str) -> Option<T> {
self.get_raw(section, key).and_then(|x| x.parse().ok())
}
    /// Get vector value of `key` in `section`, using `","` as the separator
    /// (whitespace around each element is trimmed).
    ///
    /// The function returns [None](Option::None) if one of the elements can not be parsed.
    ///
    /// - output type `T` must implement [FromStr] trait for auto conversion
    ///
    /// # Example
    /// ```
    /// # use tini::Ini;
    /// let conf = Ini::from_buffer("[section]\nlist = 1, 2, 3, 4").unwrap();
    ///
    /// let value: Option<Vec<u8>> = conf.get_vec("section", "list");
    ///
    /// assert_eq!(value, Some(vec![1, 2, 3, 4]));
    /// ```
    pub fn get_vec<T>(&self, section: &str, key: &str) -> Option<Vec<T>>
    where
        T: FromStr,
    {
        self.get_vec_with_sep(section, key, ",")
    }
/// Get vector value of `key` in `section` separated by `sep` string.
///
/// The function returns [None](Option::None) if one of the elements can not be parsed or not found.
///
/// - output type `T` must implement [FromStr] trait for auto conversion
///
/// # Example
/// ```
/// # use tini::Ini;
/// let conf = Ini::from_buffer("[section]\nlist = 1|2|3|4").unwrap();
///
/// let value: Option<Vec<u8>> = conf.get_vec_with_sep("section", "list", "|");
///
/// assert_eq!(value, Some(vec![1, 2, 3, 4]));
/// ```
pub fn get_vec_with_sep<T>(&self, section: &str, key: &str, sep: &str) -> Option<Vec<T>>
where
T: FromStr,
{
self.get_raw(section, key)
.and_then(|x| x.split(sep).map(|s| s.trim().parse()).collect::<Result<Vec<T>, _>>().ok())
}
/// Get iterator for section with given name.
///
/// If section with given name doesn't exist in document, method returns empty iterator
///
/// # Example
/// ```
/// # use tini::Ini;
/// let conf = Ini::from_buffer(["[search]",
/// "g = google.com",
/// "dd = duckduckgo.com"].join("\n")).unwrap();
///
/// for (key, value) in conf.section_iter("search") {
/// match key.as_str() {
/// "g" => assert_eq!(value.as_str(), "google.com"),
/// "dd" => assert_eq!(value.as_str(), "duckduckgo.com"),
/// _ => assert!(false),
/// }
/// }
/// assert_eq!(conf.section_iter("absent").count(), 0);
/// ```
pub fn section_iter<K: Into<String>>(&self, section: K) -> SectionIter {
let name = section.into();
SectionIter { iter: self.document.get(&name).unwrap_or(&self.empty_section).iter() }
}
    /// Iterate over all sections, yielding pairs of section name and iterator
    /// over the section elements. The concrete iterator element type is
    /// `(&'a String, SectionIter<'a>)`.
    ///
    /// # Example
    /// ```
    /// # use tini::Ini;
    /// let conf = Ini::new().section("foo")
    ///                      .item("item", "value")
    ///                      .item("other", "something")
    ///                      .section("bar")
    ///                      .item("one", "1");
    ///
    /// for (name, section_iter) in conf.iter() {
    ///     match name.as_str() {
    ///         "foo" => assert_eq!(section_iter.count(), 2),
    ///         "bar" => assert_eq!(section_iter.count(), 1),
    ///         _ => assert!(false),
    ///     }
    /// }
    /// ```
    pub fn iter(&self) -> IniIter {
        IniIter { iter: self.document.iter() }
    }
    /// Iterate over all sections, yielding pairs of section name and mutable
    /// iterator over the section elements. The concrete iterator element type is
    /// `(&'a String, SectionIterMut<'a>)`.
    ///
    /// # Example
    /// ```
    /// # use tini::Ini;
    /// let mut conf = Ini::new().section("foo")
    ///                          .item("item", "value")
    ///                          .item("other", "something")
    ///                          .section("bar")
    ///                          .item("one", "1");
    ///
    /// for (name, section_iter) in conf.iter_mut() {
    ///     for (key, val) in section_iter {
    ///         *val = String::from("replaced");
    ///     }
    /// }
    ///
    /// for (name, section_iter) in conf.iter() {
    ///     for (key, val) in section_iter {
    ///         assert_eq!(val.as_str(), "replaced");
    ///     }
    /// }
    /// ```
    pub fn iter_mut(&mut self) -> IniIterMut {
        IniIterMut { iter: self.document.iter_mut() }
    }
}
impl fmt::Display for Ini {
    /// Render the document as INI text: a `[name]` header followed by
    /// `key = value` lines per section, sections separated by a blank line,
    /// and no trailing newline.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut rendered = String::new();
        for (name, section) in self.iter() {
            rendered.push_str(&format!("[{}]\n", name));
            for (key, value) in section {
                rendered.push_str(&format!("{} = {}\n", key, value));
            }
            // Blank line separating this section from the next
            rendered.push('\n');
        }
        // Drop the final "\n\n" (last value's newline plus the separator)
        rendered.pop();
        rendered.pop();
        write!(f, "{}", rendered)
    }
}
impl Default for Ini {
    // Equivalent to Ini::new(): an empty document
    fn default() -> Self {
        Self::new()
    }
}
/// Iterator over `(section name, SectionIter)` pairs, in document order.
pub struct IniIter<'a> {
    #[doc(hidden)]
    iter: ordered_hashmap::Iter<'a, String, Section>,
}
impl<'a> Iterator for IniIter<'a> {
    type Item = (&'a String, SectionIter<'a>);
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Wrap each section's own iterator so callers never see the map internals
        self.iter.next().map(|(name, section)| (name, SectionIter { iter: section.iter() }))
    }
}
/// Iterator over `(section name, SectionIterMut)` pairs, in document order.
pub struct IniIterMut<'a> {
    #[doc(hidden)]
    iter: ordered_hashmap::IterMut<'a, String, Section>,
}
impl<'a> Iterator for IniIterMut<'a> {
    type Item = (&'a String, SectionIterMut<'a>);
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Mutable variant: values yielded downstream can be edited in place
        self.iter.next().map(|(name, section)| (name, SectionIterMut { iter: section.iter_mut() }))
    }
}
// A section is an insertion-ordered map from key to raw string value.
type Section = OrderedHashMap<String, String>;
/// Iterator over `(&key, &value)` pairs of one section, in insertion order.
pub struct SectionIter<'a> {
    #[doc(hidden)]
    iter: ordered_hashmap::Iter<'a, String, String>,
}
impl<'a> Iterator for SectionIter<'a> {
    type Item = (&'a String, &'a String);
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }
}
/// Iterator over `(&key, &mut value)` pairs of one section, in insertion order.
pub struct SectionIterMut<'a> {
    #[doc(hidden)]
    iter: ordered_hashmap::IterMut<'a, String, String>,
}
impl<'a> Iterator for SectionIterMut<'a> {
    type Item = (&'a String, &'a mut String);
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }
}
// Unit tests covering parsing, typed getters, vectors, iteration order,
// in-place mutation, redefinition, and removal.
#[cfg(test)]
mod library_test {
    use super::*;
    #[test]
    fn bool() -> Result<(), Error> {
        let ini = Ini::from_buffer("[string]\nabc = true")?;
        let abc: Option<bool> = ini.get("string", "abc");
        assert_eq!(abc, Some(true));
        Ok(())
    }
    #[test]
    fn float() -> Result<(), Error> {
        let ini = Ini::from_string("[section]\nname=10.5")?;
        let name: Option<f64> = ini.get("section", "name");
        assert_eq!(name, Some(10.5));
        Ok(())
    }
    #[test]
    fn float_vec() -> Result<(), Error> {
        let ini = Ini::from_string("[section]\nname=1.2, 3.4, 5.6")?;
        let name: Option<Vec<f64>> = ini.get_vec("section", "name");
        assert_eq!(name, Some(vec![1.2, 3.4, 5.6]));
        Ok(())
    }
    // A value that fails to parse as the requested type yields None
    #[test]
    fn bad_cast() {
        let ini = Ini::new().section("one").item("a", "3.14");
        let a: Option<u32> = ini.get("one", "a");
        assert_eq!(a, None);
    }
    #[test]
    fn string_vec() -> Result<(), Error> {
        let ini = Ini::from_string("[section]\nname=a, b, c")?;
        let name: Vec<String> = ini.get_vec("section", "name").unwrap_or(vec![]);
        assert_eq!(name, ["a", "b", "c"]);
        Ok(())
    }
    // A single unparsable element invalidates the whole vector
    #[test]
    fn parse_error() -> Result<(), Error> {
        let ini = Ini::from_string("[section]\nlist = 1, 2, --, 4")?;
        let name: Option<Vec<u8>> = ini.get_vec("section", "list");
        assert_eq!(name, None);
        Ok(())
    }
    #[test]
    fn get_or_macro() -> Result<(), Error> {
        let ini = Ini::from_string("[section]\nlist = 1, 2, --, 4")?;
        let with_value: Vec<u8> = ini.get_vec("section", "list").unwrap_or(vec![1, 2, 3, 4]);
        assert_eq!(with_value, [1, 2, 3, 4]);
        Ok(())
    }
    // Keys must come back in insertion order, not alphabetical/hash order
    #[test]
    fn ordering_iter() -> Result<(), Error> {
        let ini = Ini::from_string("[a]\nc = 1\nb = 2\na = 3")?;
        let keys: Vec<&String> = ini.document.get("a").unwrap().iter().map(|(k, _)| k).collect();
        assert_eq!(["c", "b", "a"], keys[..]);
        Ok(())
    }
    #[test]
    fn mutating() {
        let mut config = Ini::new().section("items").item("a", "1").item("b", "2").item("c", "3");
        // mutate items
        for (_, iter) in config.iter_mut() {
            for (_, value) in iter {
                let v: i32 = value.parse().unwrap();
                *value = format!("{}", v + 1);
            }
        }
        let a_val: Option<u8> = config.get("items", "a");
        let b_val: Option<u8> = config.get("items", "b");
        let c_val: Option<u8> = config.get("items", "c");
        assert_eq!(a_val, Some(2));
        assert_eq!(b_val, Some(3));
        assert_eq!(c_val, Some(4));
    }
    // Re-inserting an existing key overwrites its value
    #[test]
    fn redefine_item() {
        let config = Ini::new().section("items").item("one", "3").item("two", "2").item("one", "1");
        let one: Option<i32> = config.get("items", "one");
        assert_eq!(one, Some(1));
    }
    // Re-selecting an existing section appends to it rather than replacing it
    #[test]
    fn redefine_section() {
        let config =
            Ini::new().section("one").item("a", "1").section("two").item("b", "2").section("one").item("c", "3");
        let a_val: Option<i32> = config.get("one", "a");
        let c_val: Option<i32> = config.get("one", "c");
        assert_eq!(a_val, Some(1));
        assert_eq!(c_val, Some(3));
    }
    #[test]
    fn with_escaped_items() {
        let config = Ini::new().section("default").item("vector", r"1, 2, 3, 4, 5, 6, 7");
        let vector: Vec<String> = config.get_vec("default", "vector").unwrap();
        assert_eq!(vector, ["1", "2", "3", "4", "5", "6", "7"]);
    }
    // Custom separators let element values themselves contain commas
    #[test]
    fn use_item_vec() {
        let config = Ini::new().section("default").item_vec_with_sep("a", &["a,b", "c,d", "e"], "|");
        let v: Vec<String> = config.get_vec_with_sep("default", "a", "|").unwrap();
        assert_eq!(v, [r"a,b", "c,d", "e"]);
    }
    #[test]
    fn remove_section() {
        let mut config = Ini::new().section("one").item("a", "1").section("two").item("b", "2");
        config = config.section("one").clear();
        assert_eq!(config.get::<u8>("one", "a"), None);
        assert_eq!(config.get::<u8>("two", "b"), Some(2));
    }
    #[test]
    fn remove_item() {
        let mut config = Ini::new().section("one").item("a", "1").item("b", "2");
        config = config.section("one").remove("a");
        assert_eq!(config.get::<u8>("one", "a"), None);
        assert_eq!(config.get::<u8>("one", "b"), Some(2));
    }
}
polish docs
//! _**tini** is a **t**iny **ini**-file parsing library_
//!
//! This small library provides basic functions to operate with ini-files.
//!
//! Features:
//!
//! * no dependencies;
//! * parsing [from file](Ini::from_file), [from reader](Ini::from_reader) and [from buffer](Ini::from_buffer);
//! * [convert parsed value to given type](Ini::get);
//! * [parse comma-separated lists to vectors](Ini::get_vec);
//! * construct new ini-structure with [method chaining](Ini::item);
//! * writing [to file](Ini::to_file), [to writer](Ini::to_writer) and [to buffer](Ini::to_buffer).
//!
//! # Examples
//! ## Read from buffer and get string values
//! ````
//! # use tini::Ini;
//! let conf = Ini::from_buffer(["[search]",
//! "g = google.com",
//! "dd = duckduckgo.com"].join("\n")).unwrap();
//!
//! let g: String = conf.get("search", "g").unwrap();
//! let dd: String = conf.get("search", "dd").unwrap();
//!
//! assert_eq!(g, "google.com");
//! assert_eq!(dd, "duckduckgo.com");
//! ````
//! ## Construct in program and get vectors
//! ````
//! # use tini::Ini;
//! let conf = Ini::new().section("floats")
//! .item_vec("consts", &[3.1416, 2.7183])
//! .section("integers")
//! .item_vec("lost", &[4, 8, 15, 16, 23, 42]);
//!
//! let consts: Vec<f64> = conf.get_vec("floats", "consts").unwrap();
//! let lost: Vec<i32> = conf.get_vec("integers", "lost").unwrap();
//!
//! assert_eq!(consts, [3.1416, 2.7183]);
//! assert_eq!(lost, [4, 8, 15, 16, 23, 42]);
//! ````
pub mod error;
mod ordered_hashmap;
mod parser;
use error::Error;
use ordered_hashmap::OrderedHashMap;
use parser::{parse_line, Parsed};
use std::fmt;
use std::fs::File;
use std::hash::Hash;
use std::io::{self, BufReader, BufWriter, Read, Write};
use std::iter::Iterator;
use std::path::Path;
use std::str::FromStr;
/// Structure for INI-file data
#[derive(Debug)]
pub struct Ini {
    #[doc(hidden)]
    // Sections in insertion order; each maps keys to raw (unparsed) string values
    document: OrderedHashMap<String, Section>,
    // Section targeted by subsequent chained item()/items()/clear()/remove() calls
    last_section_name: String,
    // Shared empty section backing section_iter() for unknown section names
    empty_section: Section,
}
impl Ini {
    /// Create an empty Ini (similar to [Ini::default])
    ///
    /// The document contains no sections until `section()`/`item()` add them.
    pub fn new() -> Ini {
        Ini { document: OrderedHashMap::new(), last_section_name: String::new(), empty_section: Section::new() }
    }
    /// Private constructor which creates an [Ini] struct from the input string,
    /// parsing it line by line.  Returns the parser's [Error] (carrying the
    /// offending line number) for the first malformed line.
    fn from_string(string: &str) -> Result<Ini, Error> {
        let mut result = Ini::new();
        for (index, line) in string.lines().enumerate() {
            match parse_line(&line, index)? {
                Parsed::Section(name) => result = result.section(name),
                Parsed::Value(name, value) => result = result.item(name, value),
                // Empty lines and comments parse to other variants and are skipped
                _ => (),
            };
        }
        Ok(result)
    }
/// Construct Ini from file
///
/// # Errors
/// This function will return an [Error] if file cannot be opened or parsed
///
/// # Examples
/// You may use [Path]
///
/// ```
/// # use std::path::Path;
/// # use tini::Ini;
/// let path = Path::new("./examples/example.ini");
///
/// let conf = Ini::from_file(path);
///
/// assert!(conf.ok().is_some());
/// ```
///
/// or `&str`
///
/// ```
/// # use tini::Ini;
/// let conf = Ini::from_file("./examples/example.ini");
///
/// assert!(conf.ok().is_some());
/// ```
pub fn from_file<S: AsRef<Path> + ?Sized>(path: &S) -> Result<Ini, Error> {
let file = File::open(path)?;
let mut reader = BufReader::new(file);
Ini::from_reader(&mut reader)
}
/// Construct Ini from any struct who implement [Read](std::io::Read) trait
///
/// # Errors
/// This function will return an [Error] if reader cannot be read or parsed
///
/// # Example
///
/// ```
/// # use std::io::BufReader;
/// # use std::fs::File;
/// # use tini::Ini;
/// let f = File::open("./examples/example.ini").unwrap();
/// let mut reader = BufReader::new(f);
///
/// let conf = Ini::from_reader(&mut reader);
///
/// assert!(conf.ok().is_some());
/// ```
pub fn from_reader<R: Read>(reader: &mut R) -> Result<Ini, Error> {
let mut buffer = String::new();
reader.read_to_string(&mut buffer)?;
Ini::from_string(&buffer)
}
/// Construct Ini from buffer which can be [Into]ed to String
///
/// # Errors
/// This function will return an [Error] if buffer cannot be parsed
///
/// # Example
/// ```
/// # use tini::Ini;
/// let conf = Ini::from_buffer("[section]\none = 1").unwrap();
///
/// let value: Option<u8> = conf.get("section", "one");
/// assert_eq!(value, Some(1));
/// ```
pub fn from_buffer<S: Into<String>>(buf: S) -> Result<Ini, Error> {
Ini::from_string(&buf.into())
}
/// Write Ini to file. This function is similar to [from_file](Ini::from_file) in use.
///
/// # Errors
/// Errors returned by [File::create] and [Write::write_all]
pub fn to_file<S: AsRef<Path> + ?Sized>(&self, path: &S) -> Result<(), io::Error> {
let file = File::create(path)?;
let mut writer = BufWriter::new(file);
self.to_writer(&mut writer)
}
/// Write [Ini] to any struct who implement [Write] trait.
///
/// # Errors
/// Errors returned by [Write::write_all](Write::write_all)
///
/// # Example
/// ```
/// # use tini::Ini;
/// let conf = Ini::default().section("a").item("a", 1);
///
/// // create output Vec<u8> buffer
/// let mut output = Vec::new();
/// // let's write data to Vec<u8>
/// conf.to_writer(&mut output);
///
/// // cast Vec<u8> to utf-8 string
/// let casted_result = String::from_utf8(output).unwrap();
/// assert_eq!(casted_result, "[a]\na = 1")
/// ```
pub fn to_writer<W: Write>(&self, writer: &mut W) -> Result<(), io::Error> {
writer.write_all(self.to_buffer().as_bytes())?;
Ok(())
}
    /// Write [Ini] to buffer (identical to `to_string()`; both go through the
    /// [Display](fmt::Display) implementation)
    ///
    /// # Example
    /// ```
    /// # use tini::Ini;
    /// let conf = Ini::from_buffer("[section]\none = 1").unwrap();
    ///
    /// // you may use `conf.to_buffer()`
    /// let value: String = conf.to_buffer();
    ///
    /// // or conf.to_string();
    /// let value = conf.to_string();
    ///
    /// // or format!("{}", conf);
    /// let value: String = format!("{}", conf);
    ///
    /// // but the result will be the same
    /// assert_eq!(value, "[section]\none = 1");
    /// ```
    pub fn to_buffer(&self) -> String {
        self.to_string()
    }
    /// Set section name for the following methods in chain ([`item()`](Ini::item), [`items()`](Ini::items), etc.)
    ///
    /// # Warning
    /// This function doesn't create a section by itself; the section only
    /// comes into existence once an item is inserted into it.
    ///
    /// # Example
    /// ```
    /// # use tini::Ini;
    /// let mut conf = Ini::new().section("empty");
    /// assert_eq!(conf.to_buffer(), "");
    ///
    /// // but section will be created on item() call
    /// conf = conf.section("one").item("a", 1);
    /// assert_eq!(conf.to_buffer(), "[one]\na = 1");
    /// ```
    pub fn section<S: Into<String>>(mut self, name: S) -> Self {
        self.last_section_name = name.into();
        self
    }
/// Add key-value pair to last section.
///
/// - `name` must support [Into] to [String]
/// - `value` must support [Display](fmt::Display) to support conversion to [String]
///
/// # Example
/// ```
/// # use tini::Ini;
/// let conf = Ini::new().section("test")
/// .item("value", 10);
///
/// assert_eq!(conf.to_buffer(), "[test]\nvalue = 10");
/// ```
pub fn item<N, V>(mut self, name: N, value: V) -> Self
where
N: Into<String>,
V: fmt::Display,
{
self.document
.entry(self.last_section_name.clone())
.or_insert_with(Section::new)
.insert(name.into(), value.to_string());
self
}
/// Add key-vector pair to last section separated by `sep` string.
///
/// - `name` must support [Into] to [String]
/// - `vector` elements must support [Display](fmt::Display) to support conversion to [String]
/// - `sep` arbitrary string delimiter
///
/// # Example
/// ```
/// # use tini::Ini;
/// let conf = Ini::new()
///            .section("default")
///            // add a vector with `,` separator: 1,2,3,4
///            .item_vec_with_sep("a", &[1, 2, 3, 4], ",")
///            // add a vector with `|` separator: a|b|c
///            .item_vec_with_sep("b", &vec!["a", "b", "c"], "|");
///
/// let va: Option<Vec<u8>> = conf.get_vec("default", "a");
/// let vb: Vec<String> = conf.get_vec_with_sep("default", "b", "|").unwrap();
///
/// assert_eq!(va, Some(vec![1, 2, 3, 4]));
/// assert_eq!(vb, ["a", "b", "c"]);
/// ```
pub fn item_vec_with_sep<S, V>(mut self, name: S, vector: &[V], sep: &str) -> Self
where
    S: Into<String>,
    V: fmt::Display,
{
    // `to_string()` goes through the same `Display` impl as `format!("{}", v)`
    // but states the intent directly (and avoids the format-machinery detour).
    let vector_data = vector.iter().map(|v| v.to_string()).collect::<Vec<_>>().join(sep);
    self.document
        .entry(self.last_section_name.clone())
        .or_insert_with(Section::new)
        .insert(name.into(), vector_data);
    self
}
/// Add key-vector pair to last section.
///
/// - `name` must support [Into] to [String]
/// - `vector` elements must support [Display](fmt::Display) to support conversion to [String]
///
/// # Example
/// ```
/// # use tini::Ini;
/// let conf = Ini::new()
/// .section("default")
/// // add vector with default separator `, `
/// .item_vec("a", &[1, 2, 3, 4])
/// // and another vector
/// .item_vec("b", &vec!["a", "b", "c"]);
///
/// let va: Option<Vec<u8>> = conf.get_vec("default", "a");
/// let vb: Vec<String> = conf.get_vec("default", "b").unwrap();
///
/// assert_eq!(va, Some(vec![1, 2, 3, 4]));
/// assert_eq!(vb, ["a", "b", "c"]);
/// ```
pub fn item_vec<S, V>(self, name: S, vector: &[V]) -> Self
where
S: Into<String>,
V: fmt::Display,
{
self.item_vec_with_sep(name, vector, ", ")
}
/// Insert items from any object supporting [IntoIterator] into last section.
///
/// # Example
/// ```
/// # use tini::Ini;
/// use std::collections::HashMap;
///
/// let mut conf = Ini::new()
///                    .section("colors")
///                    .items(vec![("black", "#000000"),
///                                ("white", "#ffffff")]);
///
/// // create custom section
/// let mut numbers = HashMap::new();
/// numbers.insert("round_pi", 3);
/// // and add to `conf`
/// conf = conf.section("numbers").items(numbers);
///
/// assert_eq!(conf.to_buffer(), [
///     "[colors]",
///     "black = #000000",
///     "white = #ffffff",
///     "",
///     "[numbers]",
///     "round_pi = 3"
/// ].join("\n"));
/// ```
pub fn items<K, V, I>(self, items: I) -> Self
where
    K: fmt::Display + Eq + Hash,
    V: fmt::Display,
    I: IntoIterator<Item = (K, V)>,
{
    // Thread `self` through the builder-style `item` call for every pair.
    items
        .into_iter()
        .fold(self, |ini, (key, value)| ini.item(key.to_string(), value.to_string()))
}
/// Remove section from [Ini].
///
/// # Example
/// ```
/// # use tini::Ini;
/// let mut config = Ini::from_buffer([
///     "[one]",
///     "a = 1",
///     "[two]",
///     "b = 2"
/// ].join("\n")).unwrap();
///
/// config = config.section("one").clear();
///
/// assert_eq!(config.to_buffer(), "[two]\nb = 2");
/// ```
pub fn clear(mut self) -> Self {
    // Dropping the section entry discards all of its key-value pairs at once;
    // a missing section is silently a no-op.
    self.document.remove(&self.last_section_name);
    self
}
/// Remove item from section.
///
/// # Example
/// ```
/// # use tini::Ini;
/// let mut config = Ini::from_buffer([
///     "[one]",
///     "a = 1",
///     "b = 2"
/// ].join("\n")).unwrap();
///
/// config = config.section("one").remove("b");
///
/// assert_eq!(config.to_buffer(), "[one]\na = 1");
/// ```
pub fn remove<K: Into<String>>(mut self, key: K) -> Self {
    // A missing section (or missing key) is silently a no-op.
    if let Some(section) = self.document.get_mut(&self.last_section_name) {
        section.remove(&key.into());
    }
    self
}
/// Private helper: look up the raw (unparsed) value of `key` in `section`.
fn get_raw(&self, section: &str, key: &str) -> Option<&String> {
    // `?` short-circuits to `None` when the section is absent.
    self.document.get(section)?.get(key)
}
/// Get scalar value of key in section.
///
/// - output type `T` must implement [FromStr] trait for auto conversion
///
/// # Example
/// ```
/// # use tini::Ini;
/// let conf = Ini::from_buffer("[section]\none = 1").unwrap();
///
/// let value: Option<u8> = conf.get("section", "one");
///
/// assert_eq!(value, Some(1));
/// ```
pub fn get<T: FromStr>(&self, section: &str, key: &str) -> Option<T> {
    // Missing key/section and parse failures both collapse to `None`.
    self.get_raw(section, key)?.parse().ok()
}
/// Get vector value of `key` in `section`.
///
/// The function returns [None](Option::None) if one of the elements can not be parsed.
///
/// - output type `T` must implement [FromStr] trait for auto conversion
///
/// # Example
/// ```
/// # use tini::Ini;
/// let conf = Ini::from_buffer("[section]\nlist = 1, 2, 3, 4").unwrap();
///
/// let value: Option<Vec<u8>> = conf.get_vec("section", "list");
///
/// assert_eq!(value, Some(vec![1, 2, 3, 4]));
/// ```
pub fn get_vec<T>(&self, section: &str, key: &str) -> Option<Vec<T>>
where
T: FromStr,
{
self.get_vec_with_sep(section, key, ",")
}
/// Get vector value of `key` in `section` separated by `sep` string.
///
/// The function returns [None](Option::None) if one of the elements can not be parsed or not found.
///
/// - output type `T` must implement [FromStr] trait for auto conversion
///
/// # Example
/// ```
/// # use tini::Ini;
/// let conf = Ini::from_buffer("[section]\nlist = 1|2|3|4").unwrap();
///
/// let value: Option<Vec<u8>> = conf.get_vec_with_sep("section", "list", "|");
///
/// assert_eq!(value, Some(vec![1, 2, 3, 4]));
/// ```
pub fn get_vec_with_sep<T>(&self, section: &str, key: &str, sep: &str) -> Option<Vec<T>>
where
    T: FromStr,
{
    let raw = self.get_raw(section, key)?;
    // Collecting `Option<T>` items into `Option<Vec<T>>` short-circuits to
    // `None` on the first element that fails to parse.
    raw.split(sep).map(|chunk| chunk.trim().parse().ok()).collect()
}
/// An iterator visiting all key-value pairs in order of appearance in section.
///
/// If section with given name doesn't exist in document, method returns empty iterator
///
/// # Example
/// ```
/// # use tini::Ini;
/// let conf = Ini::from_buffer(["[search]",
///                              "g = google.com",
///                              "dd = duckduckgo.com"].join("\n")).unwrap();
///
/// let mut search = conf.section_iter("search");
/// assert_eq!(search.next(), Some((&"g".to_string(), &"google.com".to_string())));
/// assert_eq!(search.next(), Some((&"dd".to_string(), &"duckduckgo.com".to_string())));
/// assert_eq!(search.next(), None);
///
/// assert_eq!(conf.section_iter("absent").count(), 0);
/// ```
pub fn section_iter<K: Into<String>>(&self, section: K) -> SectionIter {
    let key = section.into();
    // Fall back to the shared empty section so absent names still yield a
    // valid (immediately exhausted) iterator.
    let source = match self.document.get(&key) {
        Some(existing) => existing,
        None => &self.empty_section,
    };
    SectionIter { iter: source.iter() }
}
/// Iterate over all sections in order of appearance, yielding pairs of
/// section name and iterator over the section elements. The iterator
/// element type is `(&'a String, SectionIter<'a>)`.
///
/// # Example
/// ```
/// # use tini::Ini;
/// let conf = Ini::new().section("foo")
///                      .item("item", "value")
///                      .item("other", "something")
///                      .section("bar")
///                      .item("one", "1");
///
/// for (name, section_iter) in conf.iter() {
///     match name.as_str() {
///         "foo" => assert_eq!(section_iter.count(), 2),
///         "bar" => assert_eq!(section_iter.count(), 1),
///         _ => assert!(false),
///     }
/// }
/// ```
pub fn iter(&self) -> IniIter {
    IniIter { iter: self.document.iter() }
}
/// Iterate over all sections in order of appearance, yielding pairs of section name and mutable
/// iterator over the section elements. The concrete iterator element type is
/// `(&'a String, SectionIterMut<'a>)`.
///
/// # Example
/// ```
/// # use tini::Ini;
/// let mut conf = Ini::new().section("foo")
///                          .item("item", "value")
///                          .item("other", "something")
///                          .section("bar")
///                          .item("one", "1");
///
/// for (_name, section_iter) in conf.iter_mut() {
///     for (_key, val) in section_iter {
///         *val = String::from("replaced");
///     }
/// }
///
/// for (_name, section_iter) in conf.iter() {
///     for (_key, val) in section_iter {
///         assert_eq!(val.as_str(), "replaced");
///     }
/// }
/// ```
pub fn iter_mut(&mut self) -> IniIterMut {
    IniIterMut { iter: self.document.iter_mut() }
}
}
impl fmt::Display for Ini {
    /// Render the document as INI text: each section as `[name]` followed by
    /// its `key = value` lines, with one blank line between sections and no
    /// trailing newline.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Write straight to the formatter instead of building an intermediate
        // String (the old code allocated a `format!` String per line and then
        // popped two trailing newlines). Emitting the separator *before* every
        // section but the first produces the identical output shape.
        let mut first = true;
        for (name, section) in self.iter() {
            if !first {
                // blank line between sections
                write!(f, "\n\n")?;
            }
            first = false;
            write!(f, "[{}]", name)?;
            for (key, value) in section {
                write!(f, "\n{} = {}", key, value)?;
            }
        }
        Ok(())
    }
}
impl Default for Ini {
fn default() -> Self {
Self::new()
}
}
/// An iterator over the sections of an ini document
pub struct IniIter<'a> {
    #[doc(hidden)]
    iter: ordered_hashmap::Iter<'a, String, Section>,
}
impl<'a> Iterator for IniIter<'a> {
    type Item = (&'a String, SectionIter<'a>);

    /// Yield the next `(section name, section iterator)` pair.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        let (name, section) = self.iter.next()?;
        Some((name, SectionIter { iter: section.iter() }))
    }
}
/// A mutable iterator over the sections of an ini document
pub struct IniIterMut<'a> {
    #[doc(hidden)]
    iter: ordered_hashmap::IterMut<'a, String, Section>,
}
impl<'a> Iterator for IniIterMut<'a> {
    type Item = (&'a String, SectionIterMut<'a>);

    /// Yield the next `(section name, mutable section iterator)` pair.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        let (name, section) = self.iter.next()?;
        Some((name, SectionIterMut { iter: section.iter_mut() }))
    }
}
// Internal storage for one section: an insertion-ordered key -> value map.
type Section = OrderedHashMap<String, String>;
/// An iterator over the entries of a section, in order of appearance
pub struct SectionIter<'a> {
    #[doc(hidden)]
    iter: ordered_hashmap::Iter<'a, String, String>,
}
impl<'a> Iterator for SectionIter<'a> {
    type Item = (&'a String, &'a String);
    // Thin forward to the underlying ordered-map iterator.
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }
}
/// A mutable iterator over the entries of a section, in order of appearance
pub struct SectionIterMut<'a> {
    #[doc(hidden)]
    iter: ordered_hashmap::IterMut<'a, String, String>,
}
impl<'a> Iterator for SectionIterMut<'a> {
    type Item = (&'a String, &'a mut String);
    // Thin forward to the underlying ordered-map iterator (values are mutable).
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }
}
#[cfg(test)]
mod library_test {
    use super::*;

    #[test]
    fn bool() -> Result<(), Error> {
        let doc = Ini::from_buffer("[string]\nabc = true")?;
        let flag: Option<bool> = doc.get("string", "abc");
        assert_eq!(flag, Some(true));
        Ok(())
    }

    #[test]
    fn float() -> Result<(), Error> {
        let doc = Ini::from_string("[section]\nname=10.5")?;
        let parsed: Option<f64> = doc.get("section", "name");
        assert_eq!(parsed, Some(10.5));
        Ok(())
    }

    #[test]
    fn float_vec() -> Result<(), Error> {
        let doc = Ini::from_string("[section]\nname=1.2, 3.4, 5.6")?;
        let parsed: Option<Vec<f64>> = doc.get_vec("section", "name");
        assert_eq!(parsed, Some(vec![1.2, 3.4, 5.6]));
        Ok(())
    }

    #[test]
    fn bad_cast() {
        // "3.14" is not a valid u32 -> get() must return None, not panic.
        let doc = Ini::new().section("one").item("a", "3.14");
        let parsed: Option<u32> = doc.get("one", "a");
        assert_eq!(parsed, None);
    }

    #[test]
    fn string_vec() -> Result<(), Error> {
        let doc = Ini::from_string("[section]\nname=a, b, c")?;
        let parsed: Vec<String> = doc.get_vec("section", "name").unwrap_or_default();
        assert_eq!(parsed, ["a", "b", "c"]);
        Ok(())
    }

    #[test]
    fn parse_error() -> Result<(), Error> {
        // One unparsable element poisons the whole vector.
        let doc = Ini::from_string("[section]\nlist = 1, 2, --, 4")?;
        let parsed: Option<Vec<u8>> = doc.get_vec("section", "list");
        assert_eq!(parsed, None);
        Ok(())
    }

    #[test]
    fn get_or_macro() -> Result<(), Error> {
        let doc = Ini::from_string("[section]\nlist = 1, 2, --, 4")?;
        let fallback: Vec<u8> = doc.get_vec("section", "list").unwrap_or_else(|| vec![1, 2, 3, 4]);
        assert_eq!(fallback, [1, 2, 3, 4]);
        Ok(())
    }

    #[test]
    fn ordering_iter() -> Result<(), Error> {
        // Keys must come back in insertion order, not sorted.
        let doc = Ini::from_string("[a]\nc = 1\nb = 2\na = 3")?;
        let keys: Vec<&String> = doc.document.get("a").unwrap().iter().map(|(k, _)| k).collect();
        assert_eq!(["c", "b", "a"], keys[..]);
        Ok(())
    }

    #[test]
    fn mutating() {
        let mut doc = Ini::new().section("items").item("a", "1").item("b", "2").item("c", "3");
        // bump every stored number by one through iter_mut()
        for (_, entries) in doc.iter_mut() {
            for (_, value) in entries {
                let number: i32 = value.parse().unwrap();
                *value = (number + 1).to_string();
            }
        }
        assert_eq!(doc.get::<u8>("items", "a"), Some(2));
        assert_eq!(doc.get::<u8>("items", "b"), Some(3));
        assert_eq!(doc.get::<u8>("items", "c"), Some(4));
    }

    #[test]
    fn redefine_item() {
        // Later writes to the same key win.
        let doc = Ini::new().section("items").item("one", "3").item("two", "2").item("one", "1");
        assert_eq!(doc.get::<i32>("items", "one"), Some(1));
    }

    #[test]
    fn redefine_section() {
        // Revisiting a section appends to it without dropping earlier items.
        let doc =
            Ini::new().section("one").item("a", "1").section("two").item("b", "2").section("one").item("c", "3");
        assert_eq!(doc.get::<i32>("one", "a"), Some(1));
        assert_eq!(doc.get::<i32>("one", "c"), Some(3));
    }

    #[test]
    fn with_escaped_items() {
        let doc = Ini::new().section("default").item("vector", r"1, 2, 3, 4, 5, 6, 7");
        let parsed: Vec<String> = doc.get_vec("default", "vector").unwrap();
        assert_eq!(parsed, ["1", "2", "3", "4", "5", "6", "7"]);
    }

    #[test]
    fn use_item_vec() {
        // A custom separator lets elements themselves contain commas.
        let doc = Ini::new().section("default").item_vec_with_sep("a", &["a,b", "c,d", "e"], "|");
        let parsed: Vec<String> = doc.get_vec_with_sep("default", "a", "|").unwrap();
        assert_eq!(parsed, [r"a,b", "c,d", "e"]);
    }

    #[test]
    fn remove_section() {
        let mut doc = Ini::new().section("one").item("a", "1").section("two").item("b", "2");
        doc = doc.section("one").clear();
        assert_eq!(doc.get::<u8>("one", "a"), None);
        assert_eq!(doc.get::<u8>("two", "b"), Some(2));
    }

    #[test]
    fn remove_item() {
        let mut doc = Ini::new().section("one").item("a", "1").item("b", "2");
        doc = doc.section("one").remove("a");
        assert_eq!(doc.get::<u8>("one", "a"), None);
        assert_eq!(doc.get::<u8>("one", "b"), Some(2));
    }
}
|
//! # Serde JSON
//!
//! JSON is a ubiquitous open-standard format that uses human-readable text to
//! transmit data objects consisting of key-value pairs.
//!
//! ```json
//! {
//! "name": "John Doe",
//! "age": 43,
//! "address": {
//! "street": "10 Downing Street",
//! "city": "London"
//! },
//! "phones": [
//! "+44 1234567",
//! "+44 2345678"
//! ]
//! }
//! ```
//!
//! There are three common ways that you might find yourself needing to work
//! with JSON data in Rust.
//!
//! - **As text data.** An unprocessed string of JSON data that you receive on
//! an HTTP endpoint, read from a file, or prepare to send to a remote
//! server.
//! - **As an untyped or loosely typed representation.** Maybe you want to
//! check that some JSON data is valid before passing it on, but without
//! knowing the structure of what it contains. Or you want to do very basic
//! manipulations like insert a key in a particular spot.
//! - **As a strongly typed Rust data structure.** When you expect all or most
//! of your data to conform to a particular structure and want to get real
//! work done without JSON's loosey-goosey nature tripping you up.
//!
//! Serde JSON provides efficient, flexible, safe ways of converting data
//! between each of these representations.
//!
//! # Operating on untyped JSON values
//!
//! Any valid JSON data can be manipulated in the following recursive enum
//! representation. This data structure is [`serde_json::Value`][value].
//!
//! ```edition2018
//! # use serde_json::{Number, Map};
//! #
//! # #[allow(dead_code)]
//! enum Value {
//! Null,
//! Bool(bool),
//! Number(Number),
//! String(String),
//! Array(Vec<Value>),
//! Object(Map<String, Value>),
//! }
//! ```
//!
//! A string of JSON data can be parsed into a `serde_json::Value` by the
//! [`serde_json::from_str`][from_str] function. There is also
//! [`from_slice`][from_slice] for parsing from a byte slice &[u8] and
//! [`from_reader`][from_reader] for parsing from any `io::Read` like a File or
//! a TCP stream.
//!
//! ```edition2018
//! use serde_json::{Result, Value};
//!
//! fn untyped_example() -> Result<()> {
//! // Some JSON input data as a &str. Maybe this comes from the user.
//! let data = r#"
//! {
//! "name": "John Doe",
//! "age": 43,
//! "phones": [
//! "+44 1234567",
//! "+44 2345678"
//! ]
//! }"#;
//!
//! // Parse the string of data into serde_json::Value.
//! let v: Value = serde_json::from_str(data)?;
//!
//! // Access parts of the data by indexing with square brackets.
//! println!("Please call {} at the number {}", v["name"], v["phones"][0]);
//!
//! Ok(())
//! }
//! #
//! # fn main() {
//! # untyped_example().unwrap();
//! # }
//! ```
//!
//! The result of square bracket indexing like `v["name"]` is a borrow of the
//! data at that index, so the type is `&Value`. A JSON map can be indexed with
//! string keys, while a JSON array can be indexed with integer keys. If the
//! type of the data is not right for the type with which it is being indexed,
//! or if a map does not contain the key being indexed, or if the index into a
//! vector is out of bounds, the returned element is `Value::Null`.
//!
//! When a `Value` is printed, it is printed as a JSON string. So in the code
//! above, the output looks like `Please call "John Doe" at the number "+44
//! 1234567"`. The quotation marks appear because `v["name"]` is a `&Value`
//! containing a JSON string and its JSON representation is `"John Doe"`.
//! Printing as a plain string without quotation marks involves converting from
//! a JSON string to a Rust string with [`as_str()`] or avoiding the use of
//! `Value` as described in the following section.
//!
//! [`as_str()`]: https://docs.serde.rs/serde_json/enum.Value.html#method.as_str
//!
//! The `Value` representation is sufficient for very basic tasks but can be
//! tedious to work with for anything more significant. Error handling is
//! verbose to implement correctly, for example imagine trying to detect the
//! presence of unrecognized fields in the input data. The compiler is powerless
//! to help you when you make a mistake, for example imagine typoing `v["name"]`
//! as `v["nmae"]` in one of the dozens of places it is used in your code.
//!
//! # Parsing JSON as strongly typed data structures
//!
//! Serde provides a powerful way of mapping JSON data into Rust data structures
//! largely automatically.
//!
//! ```edition2018
//! use serde::{Deserialize, Serialize};
//! use serde_json::Result;
//!
//! #[derive(Serialize, Deserialize)]
//! struct Person {
//! name: String,
//! age: u8,
//! phones: Vec<String>,
//! }
//!
//! fn typed_example() -> Result<()> {
//! // Some JSON input data as a &str. Maybe this comes from the user.
//! let data = r#"
//! {
//! "name": "John Doe",
//! "age": 43,
//! "phones": [
//! "+44 1234567",
//! "+44 2345678"
//! ]
//! }"#;
//!
//! // Parse the string of data into a Person object. This is exactly the
//! // same function as the one that produced serde_json::Value above, but
//! // now we are asking it for a Person as output.
//! let p: Person = serde_json::from_str(data)?;
//!
//! // Do things just like with any other Rust data structure.
//! println!("Please call {} at the number {}", p.name, p.phones[0]);
//!
//! Ok(())
//! }
//! #
//! # fn main() {
//! # typed_example().unwrap();
//! # }
//! ```
//!
//! This is the same `serde_json::from_str` function as before, but this time we
//! assign the return value to a variable of type `Person` so Serde will
//! automatically interpret the input data as a `Person` and produce informative
//! error messages if the layout does not conform to what a `Person` is expected
//! to look like.
//!
//! Any type that implements Serde's `Deserialize` trait can be deserialized
//! this way. This includes built-in Rust standard library types like `Vec<T>`
//! and `HashMap<K, V>`, as well as any structs or enums annotated with
//! `#[derive(Deserialize)]`.
//!
//! Once we have `p` of type `Person`, our IDE and the Rust compiler can help us
//! use it correctly like they do for any other Rust code. The IDE can
//! autocomplete field names to prevent typos, which was impossible in the
//! `serde_json::Value` representation. And the Rust compiler can check that
//! when we write `p.phones[0]`, then `p.phones` is guaranteed to be a
//! `Vec<String>` so indexing into it makes sense and produces a `String`.
//!
//! # Constructing JSON values
//!
//! Serde JSON provides a [`json!` macro][macro] to build `serde_json::Value`
//! objects with very natural JSON syntax.
//!
//! ```edition2018
//! use serde_json::json;
//!
//! fn main() {
//! // The type of `john` is `serde_json::Value`
//! let john = json!({
//! "name": "John Doe",
//! "age": 43,
//! "phones": [
//! "+44 1234567",
//! "+44 2345678"
//! ]
//! });
//!
//! println!("first phone number: {}", john["phones"][0]);
//!
//! // Convert to a string of JSON and print it out
//! println!("{}", john.to_string());
//! }
//! ```
//!
//! The `Value::to_string()` function converts a `serde_json::Value` into a
//! `String` of JSON text.
//!
//! One neat thing about the `json!` macro is that variables and expressions can
//! be interpolated directly into the JSON value as you are building it. Serde
//! will check at compile time that the value you are interpolating is able to
//! be represented as JSON.
//!
//! ```edition2018
//! # use serde_json::json;
//! #
//! # fn random_phone() -> u16 { 0 }
//! #
//! let full_name = "John Doe";
//! let age_last_year = 42;
//!
//! // The type of `john` is `serde_json::Value`
//! let john = json!({
//! "name": full_name,
//! "age": age_last_year + 1,
//! "phones": [
//! format!("+44 {}", random_phone())
//! ]
//! });
//! ```
//!
//! This is amazingly convenient but we have the problem we had before with
//! `Value` which is that the IDE and Rust compiler cannot help us if we get it
//! wrong. Serde JSON provides a better way of serializing strongly-typed data
//! structures into JSON text.
//!
//! # Creating JSON by serializing data structures
//!
//! A data structure can be converted to a JSON string by
//! [`serde_json::to_string`][to_string]. There is also
//! [`serde_json::to_vec`][to_vec] which serializes to a `Vec<u8>` and
//! [`serde_json::to_writer`][to_writer] which serializes to any `io::Write`
//! such as a File or a TCP stream.
//!
//! ```edition2018
//! use serde::{Deserialize, Serialize};
//! use serde_json::Result;
//!
//! #[derive(Serialize, Deserialize)]
//! struct Address {
//! street: String,
//! city: String,
//! }
//!
//! fn print_an_address() -> Result<()> {
//! // Some data structure.
//! let address = Address {
//! street: "10 Downing Street".to_owned(),
//! city: "London".to_owned(),
//! };
//!
//! // Serialize it to a JSON string.
//! let j = serde_json::to_string(&address)?;
//!
//! // Print, write to a file, or send to an HTTP server.
//! println!("{}", j);
//!
//! Ok(())
//! }
//! #
//! # fn main() {
//! # print_an_address().unwrap();
//! # }
//! ```
//!
//! Any type that implements Serde's `Serialize` trait can be serialized this
//! way. This includes built-in Rust standard library types like `Vec<T>` and
//! `HashMap<K, V>`, as well as any structs or enums annotated with
//! `#[derive(Serialize)]`.
//!
//! # No-std support
//!
//! This crate currently requires the Rust standard library. For JSON support in
//! Serde without a standard library, please see the [`serde-json-core`] crate.
//!
//! [value]: https://docs.serde.rs/serde_json/value/enum.Value.html
//! [from_str]: https://docs.serde.rs/serde_json/de/fn.from_str.html
//! [from_slice]: https://docs.serde.rs/serde_json/de/fn.from_slice.html
//! [from_reader]: https://docs.serde.rs/serde_json/de/fn.from_reader.html
//! [to_string]: https://docs.serde.rs/serde_json/ser/fn.to_string.html
//! [to_vec]: https://docs.serde.rs/serde_json/ser/fn.to_vec.html
//! [to_writer]: https://docs.serde.rs/serde_json/ser/fn.to_writer.html
//! [macro]: https://docs.serde.rs/serde_json/macro.json.html
//! [`serde-json-core`]: https://japaric.github.io/serde-json-core/serde_json_core/
#![doc(html_root_url = "https://docs.rs/serde_json/1.0.39")]
#![cfg_attr(feature = "cargo-clippy", allow(renamed_and_removed_lints))]
#![cfg_attr(feature = "cargo-clippy", deny(clippy, clippy_pedantic))]
// Ignored clippy lints
#![cfg_attr(feature = "cargo-clippy", allow(deprecated_cfg_attr, doc_markdown))]
// Ignored clippy_pedantic lints
#![cfg_attr(feature = "cargo-clippy", allow(
// Deserializer::from_str, into_iter
should_implement_trait,
// integer and float ser/de requires these sorts of casts
cast_possible_wrap,
cast_precision_loss,
cast_sign_loss,
// things are often more readable this way
cast_lossless,
module_name_repetitions,
shadow_unrelated,
single_match_else,
use_self,
zero_prefixed_literal,
// we support older compilers
redundant_field_names,
))]
#![deny(missing_docs)]
#[macro_use]
extern crate serde;
#[cfg(feature = "preserve_order")]
extern crate indexmap;
extern crate itoa;
extern crate ryu;
#[doc(inline)]
pub use self::de::{from_reader, from_slice, from_str, Deserializer, StreamDeserializer};
#[doc(inline)]
pub use self::error::{Error, Result};
#[doc(inline)]
pub use self::ser::{
to_string, to_string_pretty, to_vec, to_vec_pretty, to_writer, to_writer_pretty, Serializer,
};
#[doc(inline)]
pub use self::value::{from_value, to_value, Map, Number, Value};
// We only use our own error type; no need for From conversions provided by the
// standard library's try! macro. This reduces lines of LLVM IR by 4%.
// Shadows the built-in try!: propagates the Err value unchanged instead of
// routing it through From::from.
macro_rules! try {
    ($e:expr) => {
        match $e {
            ::std::result::Result::Ok(val) => val,
            ::std::result::Result::Err(err) => return ::std::result::Result::Err(err),
        }
    };
}
#[macro_use]
mod macros;
pub mod de;
pub mod error;
pub mod map;
pub mod ser;
pub mod value;
mod iter;
mod number;
mod read;
#[cfg(feature = "raw_value")]
mod raw;
// Ignore bare_trait_objects lint to support old compilers
//! # Serde JSON
//!
//! JSON is a ubiquitous open-standard format that uses human-readable text to
//! transmit data objects consisting of key-value pairs.
//!
//! ```json
//! {
//! "name": "John Doe",
//! "age": 43,
//! "address": {
//! "street": "10 Downing Street",
//! "city": "London"
//! },
//! "phones": [
//! "+44 1234567",
//! "+44 2345678"
//! ]
//! }
//! ```
//!
//! There are three common ways that you might find yourself needing to work
//! with JSON data in Rust.
//!
//! - **As text data.** An unprocessed string of JSON data that you receive on
//! an HTTP endpoint, read from a file, or prepare to send to a remote
//! server.
//! - **As an untyped or loosely typed representation.** Maybe you want to
//! check that some JSON data is valid before passing it on, but without
//! knowing the structure of what it contains. Or you want to do very basic
//! manipulations like insert a key in a particular spot.
//! - **As a strongly typed Rust data structure.** When you expect all or most
//! of your data to conform to a particular structure and want to get real
//! work done without JSON's loosey-goosey nature tripping you up.
//!
//! Serde JSON provides efficient, flexible, safe ways of converting data
//! between each of these representations.
//!
//! # Operating on untyped JSON values
//!
//! Any valid JSON data can be manipulated in the following recursive enum
//! representation. This data structure is [`serde_json::Value`][value].
//!
//! ```edition2018
//! # use serde_json::{Number, Map};
//! #
//! # #[allow(dead_code)]
//! enum Value {
//! Null,
//! Bool(bool),
//! Number(Number),
//! String(String),
//! Array(Vec<Value>),
//! Object(Map<String, Value>),
//! }
//! ```
//!
//! A string of JSON data can be parsed into a `serde_json::Value` by the
//! [`serde_json::from_str`][from_str] function. There is also
//! [`from_slice`][from_slice] for parsing from a byte slice &[u8] and
//! [`from_reader`][from_reader] for parsing from any `io::Read` like a File or
//! a TCP stream.
//!
//! ```edition2018
//! use serde_json::{Result, Value};
//!
//! fn untyped_example() -> Result<()> {
//! // Some JSON input data as a &str. Maybe this comes from the user.
//! let data = r#"
//! {
//! "name": "John Doe",
//! "age": 43,
//! "phones": [
//! "+44 1234567",
//! "+44 2345678"
//! ]
//! }"#;
//!
//! // Parse the string of data into serde_json::Value.
//! let v: Value = serde_json::from_str(data)?;
//!
//! // Access parts of the data by indexing with square brackets.
//! println!("Please call {} at the number {}", v["name"], v["phones"][0]);
//!
//! Ok(())
//! }
//! #
//! # fn main() {
//! # untyped_example().unwrap();
//! # }
//! ```
//!
//! The result of square bracket indexing like `v["name"]` is a borrow of the
//! data at that index, so the type is `&Value`. A JSON map can be indexed with
//! string keys, while a JSON array can be indexed with integer keys. If the
//! type of the data is not right for the type with which it is being indexed,
//! or if a map does not contain the key being indexed, or if the index into a
//! vector is out of bounds, the returned element is `Value::Null`.
//!
//! When a `Value` is printed, it is printed as a JSON string. So in the code
//! above, the output looks like `Please call "John Doe" at the number "+44
//! 1234567"`. The quotation marks appear because `v["name"]` is a `&Value`
//! containing a JSON string and its JSON representation is `"John Doe"`.
//! Printing as a plain string without quotation marks involves converting from
//! a JSON string to a Rust string with [`as_str()`] or avoiding the use of
//! `Value` as described in the following section.
//!
//! [`as_str()`]: https://docs.serde.rs/serde_json/enum.Value.html#method.as_str
//!
//! The `Value` representation is sufficient for very basic tasks but can be
//! tedious to work with for anything more significant. Error handling is
//! verbose to implement correctly, for example imagine trying to detect the
//! presence of unrecognized fields in the input data. The compiler is powerless
//! to help you when you make a mistake, for example imagine typoing `v["name"]`
//! as `v["nmae"]` in one of the dozens of places it is used in your code.
//!
//! # Parsing JSON as strongly typed data structures
//!
//! Serde provides a powerful way of mapping JSON data into Rust data structures
//! largely automatically.
//!
//! ```edition2018
//! use serde::{Deserialize, Serialize};
//! use serde_json::Result;
//!
//! #[derive(Serialize, Deserialize)]
//! struct Person {
//! name: String,
//! age: u8,
//! phones: Vec<String>,
//! }
//!
//! fn typed_example() -> Result<()> {
//! // Some JSON input data as a &str. Maybe this comes from the user.
//! let data = r#"
//! {
//! "name": "John Doe",
//! "age": 43,
//! "phones": [
//! "+44 1234567",
//! "+44 2345678"
//! ]
//! }"#;
//!
//! // Parse the string of data into a Person object. This is exactly the
//! // same function as the one that produced serde_json::Value above, but
//! // now we are asking it for a Person as output.
//! let p: Person = serde_json::from_str(data)?;
//!
//! // Do things just like with any other Rust data structure.
//! println!("Please call {} at the number {}", p.name, p.phones[0]);
//!
//! Ok(())
//! }
//! #
//! # fn main() {
//! # typed_example().unwrap();
//! # }
//! ```
//!
//! This is the same `serde_json::from_str` function as before, but this time we
//! assign the return value to a variable of type `Person` so Serde will
//! automatically interpret the input data as a `Person` and produce informative
//! error messages if the layout does not conform to what a `Person` is expected
//! to look like.
//!
//! Any type that implements Serde's `Deserialize` trait can be deserialized
//! this way. This includes built-in Rust standard library types like `Vec<T>`
//! and `HashMap<K, V>`, as well as any structs or enums annotated with
//! `#[derive(Deserialize)]`.
//!
//! Once we have `p` of type `Person`, our IDE and the Rust compiler can help us
//! use it correctly like they do for any other Rust code. The IDE can
//! autocomplete field names to prevent typos, which was impossible in the
//! `serde_json::Value` representation. And the Rust compiler can check that
//! when we write `p.phones[0]`, then `p.phones` is guaranteed to be a
//! `Vec<String>` so indexing into it makes sense and produces a `String`.
//!
//! # Constructing JSON values
//!
//! Serde JSON provides a [`json!` macro][macro] to build `serde_json::Value`
//! objects with very natural JSON syntax.
//!
//! ```edition2018
//! use serde_json::json;
//!
//! fn main() {
//! // The type of `john` is `serde_json::Value`
//! let john = json!({
//! "name": "John Doe",
//! "age": 43,
//! "phones": [
//! "+44 1234567",
//! "+44 2345678"
//! ]
//! });
//!
//! println!("first phone number: {}", john["phones"][0]);
//!
//! // Convert to a string of JSON and print it out
//! println!("{}", john.to_string());
//! }
//! ```
//!
//! The `Value::to_string()` function converts a `serde_json::Value` into a
//! `String` of JSON text.
//!
//! One neat thing about the `json!` macro is that variables and expressions can
//! be interpolated directly into the JSON value as you are building it. Serde
//! will check at compile time that the value you are interpolating is able to
//! be represented as JSON.
//!
//! ```edition2018
//! # use serde_json::json;
//! #
//! # fn random_phone() -> u16 { 0 }
//! #
//! let full_name = "John Doe";
//! let age_last_year = 42;
//!
//! // The type of `john` is `serde_json::Value`
//! let john = json!({
//! "name": full_name,
//! "age": age_last_year + 1,
//! "phones": [
//! format!("+44 {}", random_phone())
//! ]
//! });
//! ```
//!
//! This is amazingly convenient but we have the problem we had before with
//! `Value` which is that the IDE and Rust compiler cannot help us if we get it
//! wrong. Serde JSON provides a better way of serializing strongly-typed data
//! structures into JSON text.
//!
//! # Creating JSON by serializing data structures
//!
//! A data structure can be converted to a JSON string by
//! [`serde_json::to_string`][to_string]. There is also
//! [`serde_json::to_vec`][to_vec] which serializes to a `Vec<u8>` and
//! [`serde_json::to_writer`][to_writer] which serializes to any `io::Write`
//! such as a File or a TCP stream.
//!
//! ```edition2018
//! use serde::{Deserialize, Serialize};
//! use serde_json::Result;
//!
//! #[derive(Serialize, Deserialize)]
//! struct Address {
//! street: String,
//! city: String,
//! }
//!
//! fn print_an_address() -> Result<()> {
//! // Some data structure.
//! let address = Address {
//! street: "10 Downing Street".to_owned(),
//! city: "London".to_owned(),
//! };
//!
//! // Serialize it to a JSON string.
//! let j = serde_json::to_string(&address)?;
//!
//! // Print, write to a file, or send to an HTTP server.
//! println!("{}", j);
//!
//! Ok(())
//! }
//! #
//! # fn main() {
//! # print_an_address().unwrap();
//! # }
//! ```
//!
//! Any type that implements Serde's `Serialize` trait can be serialized this
//! way. This includes built-in Rust standard library types like `Vec<T>` and
//! `HashMap<K, V>`, as well as any structs or enums annotated with
//! `#[derive(Serialize)]`.
//!
//! # No-std support
//!
//! This crate currently requires the Rust standard library. For JSON support in
//! Serde without a standard library, please see the [`serde-json-core`] crate.
//!
//! [value]: https://docs.serde.rs/serde_json/value/enum.Value.html
//! [from_str]: https://docs.serde.rs/serde_json/de/fn.from_str.html
//! [from_slice]: https://docs.serde.rs/serde_json/de/fn.from_slice.html
//! [from_reader]: https://docs.serde.rs/serde_json/de/fn.from_reader.html
//! [to_string]: https://docs.serde.rs/serde_json/ser/fn.to_string.html
//! [to_vec]: https://docs.serde.rs/serde_json/ser/fn.to_vec.html
//! [to_writer]: https://docs.serde.rs/serde_json/ser/fn.to_writer.html
//! [macro]: https://docs.serde.rs/serde_json/macro.json.html
//! [`serde-json-core`]: https://japaric.github.io/serde-json-core/serde_json_core/
#![doc(html_root_url = "https://docs.rs/serde_json/1.0.39")]
#![allow(unknown_lints, bare_trait_objects)]
#![cfg_attr(feature = "cargo-clippy", allow(renamed_and_removed_lints))]
#![cfg_attr(feature = "cargo-clippy", deny(clippy, clippy_pedantic))]
// Ignored clippy lints
#![cfg_attr(feature = "cargo-clippy", allow(deprecated_cfg_attr, doc_markdown))]
// Ignored clippy_pedantic lints
#![cfg_attr(feature = "cargo-clippy", allow(
// Deserializer::from_str, into_iter
should_implement_trait,
// integer and float ser/de requires these sorts of casts
cast_possible_wrap,
cast_precision_loss,
cast_sign_loss,
// things are often more readable this way
cast_lossless,
module_name_repetitions,
shadow_unrelated,
single_match_else,
use_self,
zero_prefixed_literal,
// we support older compilers
redundant_field_names,
))]
#![deny(missing_docs)]
#[macro_use]
extern crate serde;
#[cfg(feature = "preserve_order")]
extern crate indexmap;
extern crate itoa;
extern crate ryu;
#[doc(inline)]
pub use self::de::{from_reader, from_slice, from_str, Deserializer, StreamDeserializer};
#[doc(inline)]
pub use self::error::{Error, Result};
#[doc(inline)]
pub use self::ser::{
to_string, to_string_pretty, to_vec, to_vec_pretty, to_writer, to_writer_pretty, Serializer,
};
#[doc(inline)]
pub use self::value::{from_value, to_value, Map, Number, Value};
// We only use our own error type; no need for From conversions provided by the
// standard library's try! macro. This reduces lines of LLVM IR by 4%.
// Local `try!` macro that shadows the standard library's version for this
// crate. Unlike `std::try!`, it does NOT run the error through `From::from`
// before returning it, so the expression's error type must already match
// the enclosing function's error type. Skipping that conversion is what
// saves the LLVM IR mentioned in the note above.
macro_rules! try {
($e:expr) => {
// Evaluate once; unwrap `Ok`, early-return `Err` unchanged.
match $e {
::std::result::Result::Ok(val) => val,
::std::result::Result::Err(err) => return ::std::result::Result::Err(err),
}
};
}
#[macro_use]
mod macros;
pub mod de;
pub mod error;
pub mod map;
pub mod ser;
pub mod value;
mod iter;
mod number;
mod read;
#[cfg(feature = "raw_value")]
mod raw;
|
#![feature(no_std, lang_items, asm)]
#![no_std]
pub mod xen;
pub mod arch;
pub mod utils;
extern {
fn main(_: isize, _: *const *const u8) -> isize;
}
// Size of the guest's boot stack: 8KB.
const STACK_SIZE: usize = 8192;
// Statically allocated stack buffer placed in a dedicated ".stack" linker
// section. `#[no_mangle]` keeps the symbol name `rust_stack` stable —
// presumably so startup assembly / the linker script can locate it;
// TODO(review): confirm against the boot code, which is not visible here.
#[no_mangle]
#[allow(non_upper_case_globals)]
#[link_section=".stack"]
pub static rust_stack: [u8; STACK_SIZE] = [0; STACK_SIZE];
// One-time runtime initialisation performed before the application's
// `main` runs; currently this is only architecture-specific setup.
fn init() {
arch::init();
}
#[no_mangle]
pub fn uni_rust_entry() {
init();
unsafe {
let _ = main(0, core::ptr::null());
}
}
uni: Virtual machine is now correctly powered off
#![feature(no_std, lang_items, asm)]
#![no_std]
pub mod xen;
pub mod arch;
pub mod utils;
extern {
fn main(_: isize, _: *const *const u8) -> isize;
}
// Size of the guest's boot stack: 8KB.
const STACK_SIZE: usize = 8192;
// Statically allocated stack buffer placed in a dedicated ".stack" linker
// section. `#[no_mangle]` keeps the symbol name `rust_stack` stable —
// presumably so startup assembly / the linker script can locate it;
// TODO(review): confirm against the boot code, which is not visible here.
#[no_mangle]
#[allow(non_upper_case_globals)]
#[link_section=".stack"]
pub static rust_stack: [u8; STACK_SIZE] = [0; STACK_SIZE];
// One-time runtime initialisation performed before the application's
// `main` runs; currently this is only architecture-specific setup.
fn init() {
arch::init();
}
#[no_mangle]
/// Entry point invoked by the platform startup code.
///
/// Initialises the runtime, runs the application's `main`, then reports the
/// exit status to the hypervisor so the guest shuts down. Never returns.
pub fn uni_rust_entry() -> ! {
    // Architecture-specific setup must happen before user code runs.
    init();

    // Run the application's `main` and keep its exit status.
    // SAFETY: `main` is an extern symbol supplied by the linked application.
    let status = unsafe { main(0, core::ptr::null()) };

    // Forward the status to the scheduler's poweroff hypercall.
    xen::sched::poweroff(status as arch::defs::Ulong);

    // `poweroff` is expected never to return.
    panic!("Failed to poweroff the machine !");
}
|
#![feature(ip_addr)]
pub mod ipnetwork;
Add some crate attributes
#![crate_type = "lib"]
#![crate_name = "ipnetwork"]
#![feature(ip_addr)]
pub mod ipnetwork;
|
/*!
This crate provides a number of conversion traits with more specific semantics than those provided by `as` or `From`/`Into`.
The goal with the traits provided here is to be more specific about what generic code can rely on, as well as provide reasonably self-describing alternatives to the standard `From`/`Into` traits. For example, although `T: From<U>` might be satisfied, it imposes no restrictions on the *kind* of conversion being implemented. As such, the traits in this crate try to be very specific about what conversions are allowed. This makes them less generally applicable, but more useful where they *do* apply.
In addition, `From`/`Into` requires all conversions to succeed or panic. All conversion traits in this crate define an associated error type, allowing code to react to failed conversions as appropriate.
# API Stability Notice
The API of this crate is still not entirely decided.
# Overview
The following traits are used to define various conversion semantics:
- [`ApproxFrom`](./trait.ApproxFrom.html)/[`ApproxInto`](./trait.ApproxInto.html) - approximate conversions, with selectable approximation scheme (see [`ApproxScheme`](./trait.ApproxScheme.html)).
- [`TryFrom`](./trait.TryFrom.html)/[`TryInto`](./trait.TryInto.html) - general, potentially failing value conversions.
- [`ValueFrom`](./trait.ValueFrom.html)/[`ValueInto`](./trait.ValueInto.html) - exact, value-preserving conversions.
These extension methods are provided to help with some common cases:
- [`ConvUtil::approx_as<Dst>`](./trait.ConvUtil.html#method.approx_as) - approximates to `Dst` with the `DefaultApprox` scheme.
- [`ConvUtil::approx_as_by<Dst, S>`](./trait.ConvUtil.html#method.approx_as_by) - approximates to `Dst` with the scheme `S`.
- [`ConvUtil::try_as<Dst>`](./trait.ConvUtil.html#method.try_as) - converts to `Dst` using `TryInto::try_into`.
- [`ConvUtil::value_as<Dst>`](./trait.ConvUtil.html#method.value_as) - converts to `Dst` using `ValueInto::value_into`.
- [`ConvAsUtil::approx`](./trait.ConvAsUtil.html#method.approx) - approximates to an inferred destination type with the `DefaultApprox` scheme.
- [`ConvAsUtil::approx_by<S>`](./trait.ConvAsUtil.html#method.approx_by) - approximates to an inferred destination type with the scheme `S`.
- [`UnwrapOk::unwrap_ok`](./errors/trait.UnwrapOk.html#tymethod.unwrap_ok) - unwraps results from conversions that cannot fail.
- [`UnwrapOrInf::unwrap_or_inf`](./errors/trait.UnwrapOrInf.html#tymethod.unwrap_or_inf) - saturates to ±∞ on failure.
- [`UnwrapOrInvalid::unwrap_or_invalid`](./errors/trait.UnwrapOrInvalid.html#tymethod.unwrap_or_invalid) - substitutes the target type's "invalid" sentinel value on failure.
- [`UnwrapOrSaturate::unwrap_or_saturate`](./errors/trait.UnwrapOrSaturate.html#tymethod.unwrap_or_saturate) - saturates to the maximum or minimum value of the target type on failure.
A macro is provided to assist in implementing conversions:
- [`TryFrom!`](./macros/index.html#tryfrom!) - derives an implementation of [`TryFrom`](./trait.TryFrom.html).
If you are implementing your own types, you may also be interested in the traits contained in the [`misc`](./misc/index.html) module.
## Provided Implementations
The crate provides several blanket implementations:
- `*From<A> for A` (all types can be converted from and into themselves).
- `*Into<Dst> for Src where Dst: *From<Src>` (`*From` implementations imply a matching `*Into` implementation).
Conversions for the builtin numeric (integer and floating point) types are provided. In general, `ValueFrom` conversions exist for all pairs except for float → integer (since such a conversion is generally unlikely to *exactly* succeed) and `f64 → f32` (for the same reason). `ApproxFrom` conversions with the `DefaultApprox` scheme exist between all pairs. `ApproxFrom` with the `Wrapping` scheme exist between integers.
## Errors
A number of error types are defined in the [`errors`](./errors/index.html) module. Generally, conversions use whichever error type most *narrowly* defines the kinds of failures that can occur. For example:
- `ValueFrom<u8> for u16` cannot possibly fail, and as such it uses `NoError`.
- `ValueFrom<i8> for u16` can *only* fail with an underflow, thus it uses the `Underflow` type.
- `ValueFrom<i32> for u16` can underflow *or* overflow, hence it uses `RangeError`.
- Finally, `ApproxFrom<f32> for u16` can underflow, overflow, or attempt to convert NaN; `FloatError` covers those three cases.
Because there are *numerous* error types, the `GeneralError` enum is provided. `From<E, T> for GeneralError<T>` exists for each error type `E<T>` defined by this crate (even for `NoError`!), allowing errors to be translated automatically by `try!`. In fact, all errors can be "expanded" to *all* more general forms (*e.g.* `NoError` → `Underflow`, `Overflow` → `RangeError` → `FloatError`).
Aside from `NoError`, the various error types wrap the input value that you attempted to convert. This is so that non-`Copy` types do not need to be pre-emptively cloned prior to conversion, just in case the conversion fails. A downside is that this means there are many, *many* incompatible error types.
To help alleviate this, there is also `GeneralErrorKind`, which is simply `GeneralError<T>` without the payload, and all errors can be converted into it directly.
The reason for not just using `GeneralErrorKind` in the first place is to statically reduce the number of potential error cases you need to deal with. It also allows the `Unwrap*` extension traits to be defined *without* the possibility for runtime failure (*e.g.* you cannot use `unwrap_or_saturate` with a `FloatError`, because what do you do if the error is `NotANumber`; saturate to max or to min? Or panic?).
# Examples
```
# extern crate conv;
# use conv::*;
# fn main() {
// This *cannot* fail, so we can use `unwrap_ok` to discard the `Result`.
assert_eq!(u8::value_from(0u8).unwrap_ok(), 0u8);
// This *can* fail. Specifically, it can underflow.
assert_eq!(u8::value_from(0i8), Ok(0u8));
assert_eq!(u8::value_from(-1i8), Err(Underflow(-1)));
// This can underflow *and* overflow; hence the change to `RangeError`.
assert_eq!(u8::value_from(-1i16), Err(RangeError::Underflow(-1)));
assert_eq!(u8::value_from(0i16), Ok(0u8));
assert_eq!(u8::value_from(256i16), Err(RangeError::Overflow(256)));
// We can use the extension traits to simplify this a little.
assert_eq!(u8::value_from(-1i16).unwrap_or_saturate(), 0u8);
assert_eq!(u8::value_from(0i16).unwrap_or_saturate(), 0u8);
assert_eq!(u8::value_from(256i16).unwrap_or_saturate(), 255u8);
// Obviously, all integers can be "approximated" using the default scheme (it
// doesn't *do* anything), but they can *also* be approximated with the
// `Wrapping` scheme.
assert_eq!(
<u8 as ApproxFrom<_, DefaultApprox>>::approx_from(400u16),
Err(Overflow(400)));
assert_eq!(
<u8 as ApproxFrom<_, Wrapping>>::approx_from(400u16),
Ok(144u8));
// This is rather inconvenient; as such, there are a number of convenience
// extension methods available via `ConvUtil` and `ConvAsUtil`.
assert_eq!(400u16.approx(), Err::<u8, _>(Overflow(400)));
assert_eq!(400u16.approx_by::<Wrapping>(), Ok::<u8, _>(144u8));
assert_eq!(400u16.approx_as::<u8>(), Err(Overflow(400)));
assert_eq!(400u16.approx_as_by::<u8, Wrapping>(), Ok(144));
// Integer -> float conversions *can* fail due to limited precision.
// Once the continuous range of exactly representable integers is exceeded, the
// provided implementations fail with over/underflow errors.
assert_eq!(f32::value_from(16_777_216i32), Ok(16_777_216.0f32));
assert_eq!(f32::value_from(16_777_217i32), Err(RangeError::Overflow(16_777_217)));
// Float -> integer conversions have to be done using approximations. Although
// exact conversions are *possible*, "advertising" this with an implementation
// is misleading.
//
// Note that `DefaultApprox` for float -> integer uses whatever rounding
// mode is currently active (*i.e.* whatever `as` would do).
assert_eq!(41.0f32.approx(), Ok(41u8));
assert_eq!(41.3f32.approx(), Ok(41u8));
assert_eq!(41.5f32.approx(), Ok(41u8));
assert_eq!(41.8f32.approx(), Ok(41u8));
assert_eq!(42.0f32.approx(), Ok(42u8));
assert_eq!(255.0f32.approx(), Ok(255u8));
assert_eq!(256.0f32.approx(), Err::<u8, _>(FloatError::Overflow(256.0)));
// If you really don't care about the specific kind of error, you can just rely
// on automatic conversion to `GeneralErrorKind`.
fn too_many_errors() -> Result<(), GeneralErrorKind> {
assert_eq!({let r: u8 = try!(0u8.value_into()); r}, 0u8);
assert_eq!({let r: u8 = try!(0i8.value_into()); r}, 0u8);
assert_eq!({let r: u8 = try!(0i16.value_into()); r}, 0u8);
assert_eq!({let r: u8 = try!(0.0f32.approx()); r}, 0u8);
Ok(())
}
# let _ = too_many_errors();
# }
```
*/
#![deny(missing_docs)]
#[macro_use] extern crate custom_derive;
// Exported macros.
pub mod macros;
pub use errors::{
NoError, GeneralError, GeneralErrorKind, Unrepresentable,
Underflow, Overflow,
FloatError, RangeError, RangeErrorKind,
UnwrapOk, UnwrapOrInf, UnwrapOrInvalid, UnwrapOrSaturate,
};
/**
Publicly re-exports the most generally useful set of items.
Usage of the prelude should be considered **unstable**. Although items will likely *not* be removed without bumping the major version, new items *may* be added, which could potentially cause name conflicts in user code.
*/
pub mod prelude {
pub use super::{
ApproxFrom, ApproxInto,
ValueFrom, ValueInto,
UnwrapOk, UnwrapOrInf, UnwrapOrInvalid, UnwrapOrSaturate,
ConvUtil, ConvAsUtil,
};
}
// Re-emits its arguments verbatim in item position. Useful inside other
// macros that assemble token fragments and need the result parsed as items.
macro_rules! as_item {
($($i:item)*) => {$($i)*};
}
// Stamps out one expansion of a caller-supplied macro body for each
// parenthesised argument tuple: first defines a local `body!` macro from
// the `{ ... }` block, then invokes `body!` once per `( ... )` group.
// Used to generate repetitive impls over many argument sets.
macro_rules! item_for_each {
(
$( ($($arg:tt)*) ),* $(,)* => { $($exp:tt)* }
) => {
// The caller's rules become the temporary `body!` macro...
macro_rules! body {
$($exp)*
}
// ...which is then applied to every argument tuple in turn.
$(
body! { $($arg)* }
)*
};
}
pub mod errors;
pub mod misc;
mod impls;
/**
This trait is used to perform a conversion that is permitted to approximate the result, but *not* to wrap or saturate the result to fit into the destination type's representable range.
# Details
All implementations of this trait must provide a conversion that can be separated into two logical steps: an approximation transform, and a representation transform.
The "approximation transform" step involves transforming the input value into an approximately equivalent value which is supported by the target type *without* taking the target type's representable range into account. For example, this might involve rounding or truncating a floating point value to an integer, or reducing the accuracy of a floating point value.
The "representation transform" step *exactly* rewrites the value from the source type's binary representation into the destination type's binary representation. This step *may not* transform the value in any way. If the result of the approximation is not representable, the conversion *must* fail.
The major reason for this formulation is to exactly define what happens when converting between floating point and integer types. Often, it is unclear what happens to floating point values beyond the range of the target integer type. Do they saturate, wrap, or cause a failure?
With this formulation, it is well-defined: if a floating point value is outside the representable range, the conversion fails. This allows users to distinguish between approximation and range violation, and act accordingly.
*/
pub trait ApproxFrom<Src, Scheme = DefaultApprox>: Sized
where
    Scheme: ApproxScheme,
{
    /// The error type produced by a failed conversion.
    type Err;
    /// Convert the given value into an approximately equivalent representation.
    fn approx_from(src: Src) -> Result<Self, Self::Err>;
}

// Reflexive case: any type "approximates" itself, under every scheme, and
// doing so can never fail.
impl<Src, Scheme> ApproxFrom<Src, Scheme> for Src
where
    Scheme: ApproxScheme,
{
    type Err = NoError;
    fn approx_from(value: Src) -> Result<Self, Self::Err> {
        Ok(value)
    }
}
/**
The companion trait to `ApproxFrom`: the same conversion expressed from the
source value's point of view. See `ApproxFrom` for the full semantics.
*/
pub trait ApproxInto<Dst, Scheme = DefaultApprox>
where
    Scheme: ApproxScheme,
{
    /// The error type produced by a failed conversion.
    type Err;
    /// Convert the subject into an approximately equivalent representation.
    fn approx_into(self) -> Result<Dst, Self::Err>;
}

// Blanket impl: an `ApproxFrom` implementation on the destination type
// automatically supplies the matching `ApproxInto` on the source type.
impl<Dst, Src, Scheme> ApproxInto<Dst, Scheme> for Src
where
    Dst: ApproxFrom<Src, Scheme>,
    Scheme: ApproxScheme,
{
    type Err = Dst::Err;
    fn approx_into(self) -> Result<Dst, Self::Err> {
        <Dst as ApproxFrom<Src, Scheme>>::approx_from(self)
    }
}
/**
This trait is used to mark approximation scheme types.
*/
pub trait ApproxScheme {}
// Each scheme below is an uninhabited enum: it has no values and exists
// purely as a type-level tag selecting a particular `ApproxFrom` /
// `ApproxInto` implementation.
/**
The "default" approximation scheme. This scheme does whatever would generally be expected of a lossy conversion, assuming no additional context or instruction is given.
This is a double-edged sword: it has the loosest semantics, but is far more likely to exist than more complicated approximation schemes.
*/
pub enum DefaultApprox {}
impl ApproxScheme for DefaultApprox {}
/**
This scheme is used to convert a value by "wrapping" it into a narrower range.
In abstract, this can be viewed as the opposite of rounding: rather than preserving the most significant bits of a value, it preserves the *least* significant bits of a value.
*/
pub enum Wrapping {}
impl ApproxScheme for Wrapping {}
/**
This scheme is used to convert a value by rounding it to the nearest representable value, with ties rounding away from zero.
*/
pub enum RoundToNearest {}
impl ApproxScheme for RoundToNearest {}
/**
This scheme is used to convert a value by rounding it toward negative infinity to the nearest representable value.
*/
pub enum RoundToNegInf {}
impl ApproxScheme for RoundToNegInf {}
/**
This scheme is used to convert a value by rounding it toward positive infinity to the nearest representable value.
*/
pub enum RoundToPosInf {}
impl ApproxScheme for RoundToPosInf {}
/**
This scheme is used to convert a value by rounding it toward zero to the nearest representable value.
*/
pub enum RoundToZero {}
impl ApproxScheme for RoundToZero {}
/**
This trait is used to perform a conversion between different semantic types which might fail.

# Details

Use this when the source and destination ranges and/or representations only
partially overlap, so that failure is a reasonably expected outcome rather
than an exceptional one. A standard example is mapping integers onto enums
of unitary variants.
*/
pub trait TryFrom<Src>: Sized {
    /// The error type produced by a failed conversion.
    type Err;
    /// Convert the given value into the subject type.
    fn try_from(src: Src) -> Result<Self, Self::Err>;
}

// Reflexive case: converting a value to its own type is the identity and
// cannot fail.
impl<Src> TryFrom<Src> for Src {
    type Err = NoError;
    fn try_from(value: Src) -> Result<Self, Self::Err> {
        Ok(value)
    }
}
/**
This is the dual of `TryFrom`; see that trait for information.
*/
pub trait TryInto<Dst> {
    /// The error type produced by a failed conversion.
    type Err;
    /// Convert the subject into the destination type.
    fn try_into(self) -> Result<Dst, Self::Err>;
}

// Blanket impl: a `TryFrom` implementation on the destination type
// automatically supplies the matching `TryInto` on the source type.
impl<Src, Dst> TryInto<Dst> for Src
where
    Dst: TryFrom<Src>,
{
    type Err = Dst::Err;
    fn try_into(self) -> Result<Dst, Self::Err> {
        <Dst as TryFrom<Src>>::try_from(self)
    }
}
/**
This trait is used to perform an exact, value-preserving conversion.

# Details

Implementations should form a consistent conversion graph: every cycle of
`ValueFrom` steps (for which each step has a defined implementation) must
either round-trip the value exactly or produce an error. In other words,
the conversions are reflexive, associative and commutative in the absence
of conversion errors.
*/
pub trait ValueFrom<Src>: Sized {
    /// The error type produced by a failed conversion.
    type Err;
    /// Convert the given value into an exactly equivalent representation.
    fn value_from(src: Src) -> Result<Self, Self::Err>;
}

// Reflexive case: converting a value to its own type is the identity and
// cannot fail.
impl<Src> ValueFrom<Src> for Src {
    type Err = NoError;
    fn value_from(value: Src) -> Result<Self, Self::Err> {
        Ok(value)
    }
}
/**
This is the dual of `ValueFrom`; see that trait for information.
*/
pub trait ValueInto<Dst> {
/// The error type produced by a failed conversion.
type Err;
/// Convert the subject into an exactly equivalent representation.
fn value_into(self) -> Result<Dst, Self::Err>;
}
impl<Src, Dst> ValueInto<Dst> for Src where Dst: ValueFrom<Src> {
type Err = Dst::Err;
fn value_into(self) -> Result<Dst, Self::Err> {
ValueFrom::value_from(self)
}
}
/**
Convenience extension methods for performing conversions on any value.

When more than one implementation exists for a given type/trait pair, a
bare call to `*_into` may not be uniquely resolvable, and because the
destination type parameter lives on the trait it is cumbersome to spell
out. The same applies to approximation schemes. These methods move those
parameters onto the method, where turbofish syntax can name them directly.

See also the [`ConvAsUtil`](./trait.ConvAsUtil.html) trait.

> **Note**: There appears to be a bug in `rustdoc`'s output. This trait is implemented *for all* types, though the methods are only available for types where the appropriate conversions are defined.
*/
pub trait ConvUtil {
    /// Approximate the subject to a given type with the default scheme.
    fn approx_as<T>(self) -> Result<T, Self::Err>
    where
        Self: Sized + ApproxInto<T>,
    {
        self.approx_into()
    }

    /// Approximate the subject to a given type with a specific scheme.
    fn approx_as_by<T, S>(self) -> Result<T, Self::Err>
    where
        Self: Sized + ApproxInto<T, S>,
        S: ApproxScheme,
    {
        self.approx_into()
    }

    /// Attempt to convert the subject to a given type.
    fn try_as<T>(self) -> Result<T, Self::Err>
    where
        Self: Sized + TryInto<T>,
    {
        self.try_into()
    }

    /// Attempt a value conversion of the subject to a given type.
    fn value_as<T>(self) -> Result<T, Self::Err>
    where
        Self: Sized + ValueInto<T>,
    {
        self.value_into()
    }
}

// Blanket impl for every type; each method carries its own bounds.
impl<T> ConvUtil for T {}
/**
Convenience extension methods for conversions where the destination type is
inferred from context.

With several `ApproxFrom` implementations for a given type, a plain call to
`approx_into` may not be uniquely resolvable, and because the scheme
parameter lives on the trait it is cumbersome to name. These methods let
the scheme be supplied with turbofish syntax while the destination type is
still inferred.

See also the [`ConvUtil`](./trait.ConvUtil.html) trait.

> **Note**: There appears to be a bug in `rustdoc`'s output. This trait is implemented *for all* types, though the methods are only available for types where the appropriate conversions are defined.
*/
pub trait ConvAsUtil<Dst> {
    /// Approximate the subject with the default scheme.
    fn approx(self) -> Result<Dst, Self::Err>
    where
        Self: Sized + ApproxInto<Dst>,
    {
        self.approx_into()
    }

    /// Approximate the subject with a specific scheme.
    fn approx_by<S>(self) -> Result<Dst, Self::Err>
    where
        Self: Sized + ApproxInto<Dst, S>,
        S: ApproxScheme,
    {
        self.approx_into()
    }
}

// Blanket impl for every type; each method carries its own bounds.
impl<T, Dst> ConvAsUtil<Dst> for T {}
Expanded prelude: GeneralError*, and three rounding modes.
(RoundToNearest, RoundToZero, Wrapping).
/*!
This crate provides a number of conversion traits with more specific semantics than those provided by `as` or `From`/`Into`.
The goal with the traits provided here is to be more specific about what generic code can rely on, as well as provide reasonably self-describing alternatives to the standard `From`/`Into` traits. For example, although `T: From<U>` might be satisfied, it imposes no restrictions on the *kind* of conversion being implemented. As such, the traits in this crate try to be very specific about what conversions are allowed. This makes them less generally applicable, but more useful where they *do* apply.
In addition, `From`/`Into` requires all conversions to succeed or panic. All conversion traits in this crate define an associated error type, allowing code to react to failed conversions as appropriate.
# API Stability Notice
The API of this crate is still not entirely decided.
# Overview
The following traits are used to define various conversion semantics:
- [`ApproxFrom`](./trait.ApproxFrom.html)/[`ApproxInto`](./trait.ApproxInto.html) - approximate conversions, with selectable approximation scheme (see [`ApproxScheme`](./trait.ApproxScheme.html)).
- [`TryFrom`](./trait.TryFrom.html)/[`TryInto`](./trait.TryInto.html) - general, potentially failing value conversions.
- [`ValueFrom`](./trait.ValueFrom.html)/[`ValueInto`](./trait.ValueInto.html) - exact, value-preserving conversions.
These extension methods are provided to help with some common cases:
- [`ConvUtil::approx_as<Dst>`](./trait.ConvUtil.html#method.approx_as) - approximates to `Dst` with the `DefaultApprox` scheme.
- [`ConvUtil::approx_as_by<Dst, S>`](./trait.ConvUtil.html#method.approx_as_by) - approximates to `Dst` with the scheme `S`.
- [`ConvUtil::try_as<Dst>`](./trait.ConvUtil.html#method.try_as) - converts to `Dst` using `TryInto::try_into`.
- [`ConvUtil::value_as<Dst>`](./trait.ConvUtil.html#method.value_as) - converts to `Dst` using `ValueInto::value_into`.
- [`ConvAsUtil::approx`](./trait.ConvAsUtil.html#method.approx) - approximates to an inferred destination type with the `DefaultApprox` scheme.
- [`ConvAsUtil::approx_by<S>`](./trait.ConvAsUtil.html#method.approx_by) - approximates to an inferred destination type with the scheme `S`.
- [`UnwrapOk::unwrap_ok`](./errors/trait.UnwrapOk.html#tymethod.unwrap_ok) - unwraps results from conversions that cannot fail.
- [`UnwrapOrInf::unwrap_or_inf`](./errors/trait.UnwrapOrInf.html#tymethod.unwrap_or_inf) - saturates to ±∞ on failure.
- [`UnwrapOrInvalid::unwrap_or_invalid`](./errors/trait.UnwrapOrInvalid.html#tymethod.unwrap_or_invalid) - substitutes the target type's "invalid" sentinel value on failure.
- [`UnwrapOrSaturate::unwrap_or_saturate`](./errors/trait.UnwrapOrSaturate.html#tymethod.unwrap_or_saturate) - saturates to the maximum or minimum value of the target type on failure.
A macro is provided to assist in implementing conversions:
- [`TryFrom!`](./macros/index.html#tryfrom!) - derives an implementation of [`TryFrom`](./trait.TryFrom.html).
If you are implementing your own types, you may also be interested in the traits contained in the [`misc`](./misc/index.html) module.
## Provided Implementations
The crate provides several blanket implementations:
- `*From<A> for A` (all types can be converted from and into themselves).
- `*Into<Dst> for Src where Dst: *From<Src>` (`*From` implementations imply a matching `*Into` implementation).
Conversions for the builtin numeric (integer and floating point) types are provided. In general, `ValueFrom` conversions exist for all pairs except for float → integer (since such a conversion is generally unlikely to *exactly* succeed) and `f64 → f32` (for the same reason). `ApproxFrom` conversions with the `DefaultApprox` scheme exist between all pairs. `ApproxFrom` with the `Wrapping` scheme exist between integers.
## Errors
A number of error types are defined in the [`errors`](./errors/index.html) module. Generally, conversions use whichever error type most *narrowly* defines the kinds of failures that can occur. For example:
- `ValueFrom<u8> for u16` cannot possibly fail, and as such it uses `NoError`.
- `ValueFrom<i8> for u16` can *only* fail with an underflow, thus it uses the `Underflow` type.
- `ValueFrom<i32> for u16` can underflow *or* overflow, hence it uses `RangeError`.
- Finally, `ApproxFrom<f32> for u16` can underflow, overflow, or attempt to convert NaN; `FloatError` covers those three cases.
Because there are *numerous* error types, the `GeneralError` enum is provided. `From<E, T> for GeneralError<T>` exists for each error type `E<T>` defined by this crate (even for `NoError`!), allowing errors to be translated automatically by `try!`. In fact, all errors can be "expanded" to *all* more general forms (*e.g.* `NoError` → `Underflow`, `Overflow` → `RangeError` → `FloatError`).
Aside from `NoError`, the various error types wrap the input value that you attempted to convert. This is so that non-`Copy` types do not need to be pre-emptively cloned prior to conversion, just in case the conversion fails. A downside is that this means there are many, *many* incompatible error types.
To help alleviate this, there is also `GeneralErrorKind`, which is simply `GeneralError<T>` without the payload, and all errors can be converted into it directly.
The reason for not just using `GeneralErrorKind` in the first place is to statically reduce the number of potential error cases you need to deal with. It also allows the `Unwrap*` extension traits to be defined *without* the possibility for runtime failure (*e.g.* you cannot use `unwrap_or_saturate` with a `FloatError`, because what do you do if the error is `NotANumber`; saturate to max or to min? Or panic?).
# Examples
```
# extern crate conv;
# use conv::*;
# fn main() {
// This *cannot* fail, so we can use `unwrap_ok` to discard the `Result`.
assert_eq!(u8::value_from(0u8).unwrap_ok(), 0u8);
// This *can* fail. Specifically, it can underflow.
assert_eq!(u8::value_from(0i8), Ok(0u8));
assert_eq!(u8::value_from(-1i8), Err(Underflow(-1)));
// This can underflow *and* overflow; hence the change to `RangeError`.
assert_eq!(u8::value_from(-1i16), Err(RangeError::Underflow(-1)));
assert_eq!(u8::value_from(0i16), Ok(0u8));
assert_eq!(u8::value_from(256i16), Err(RangeError::Overflow(256)));
// We can use the extension traits to simplify this a little.
assert_eq!(u8::value_from(-1i16).unwrap_or_saturate(), 0u8);
assert_eq!(u8::value_from(0i16).unwrap_or_saturate(), 0u8);
assert_eq!(u8::value_from(256i16).unwrap_or_saturate(), 255u8);
// Obviously, all integers can be "approximated" using the default scheme (it
// doesn't *do* anything), but they can *also* be approximated with the
// `Wrapping` scheme.
assert_eq!(
<u8 as ApproxFrom<_, DefaultApprox>>::approx_from(400u16),
Err(Overflow(400)));
assert_eq!(
<u8 as ApproxFrom<_, Wrapping>>::approx_from(400u16),
Ok(144u8));
// This is rather inconvenient; as such, there are a number of convenience
// extension methods available via `ConvUtil` and `ConvAsUtil`.
assert_eq!(400u16.approx(), Err::<u8, _>(Overflow(400)));
assert_eq!(400u16.approx_by::<Wrapping>(), Ok::<u8, _>(144u8));
assert_eq!(400u16.approx_as::<u8>(), Err(Overflow(400)));
assert_eq!(400u16.approx_as_by::<u8, Wrapping>(), Ok(144));
// Integer -> float conversions *can* fail due to limited precision.
// Once the continuous range of exactly representable integers is exceeded, the
// provided implementations fail with over/underflow errors.
assert_eq!(f32::value_from(16_777_216i32), Ok(16_777_216.0f32));
assert_eq!(f32::value_from(16_777_217i32), Err(RangeError::Overflow(16_777_217)));
// Float -> integer conversions have to be done using approximations. Although
// exact conversions are *possible*, "advertising" this with an implementation
// is misleading.
//
// Note that `DefaultApprox` for float -> integer uses whatever rounding
// mode is currently active (*i.e.* whatever `as` would do).
assert_eq!(41.0f32.approx(), Ok(41u8));
assert_eq!(41.3f32.approx(), Ok(41u8));
assert_eq!(41.5f32.approx(), Ok(41u8));
assert_eq!(41.8f32.approx(), Ok(41u8));
assert_eq!(42.0f32.approx(), Ok(42u8));
assert_eq!(255.0f32.approx(), Ok(255u8));
assert_eq!(256.0f32.approx(), Err::<u8, _>(FloatError::Overflow(256.0)));
// If you really don't care about the specific kind of error, you can just rely
// on automatic conversion to `GeneralErrorKind`.
fn too_many_errors() -> Result<(), GeneralErrorKind> {
assert_eq!({let r: u8 = try!(0u8.value_into()); r}, 0u8);
assert_eq!({let r: u8 = try!(0i8.value_into()); r}, 0u8);
assert_eq!({let r: u8 = try!(0i16.value_into()); r}, 0u8);
assert_eq!({let r: u8 = try!(0.0f32.approx()); r}, 0u8);
Ok(())
}
# let _ = too_many_errors();
# }
```
*/
#![deny(missing_docs)]
#[macro_use] extern crate custom_derive;
// Exported macros.
pub mod macros;
pub use errors::{
NoError, GeneralError, GeneralErrorKind, Unrepresentable,
Underflow, Overflow,
FloatError, RangeError, RangeErrorKind,
UnwrapOk, UnwrapOrInf, UnwrapOrInvalid, UnwrapOrSaturate,
};
/**
Publicly re-exports the most generally useful set of items.
Usage of the prelude should be considered **unstable**. Although items will likely *not* be removed without bumping the major version, new items *may* be added, which could potentially cause name conflicts in user code.
*/
pub mod prelude {
pub use super::{
ApproxFrom, ApproxInto,
ValueFrom, ValueInto,
GeneralError, GeneralErrorKind,
UnwrapOk, UnwrapOrInf, UnwrapOrInvalid, UnwrapOrSaturate,
ConvUtil, ConvAsUtil,
RoundToNearest, RoundToZero, Wrapping,
};
}
// Helper macro: forces the supplied token trees to be parsed as items.
// This is a standard trick for emitting items from macro expansion positions
// where the parser would otherwise be ambiguous. Presumably used by the
// `impls`/`macros` modules — not invoked within this part of the file.
macro_rules! as_item {
    ($($i:item)*) => {$($i)*};
}
// Expands a macro body once for each parenthesised argument group.
//
// Input shape: `(args1), (args2), ... => { <macro rules> }`.
// It works by defining a local `body!` macro from the supplied rules, then
// invoking `body!` once per argument group. This is used to stamp out
// families of near-identical items (presumably the conversion impls —
// the call sites are not in this part of the file).
macro_rules! item_for_each {
    (
        $( ($($arg:tt)*) ),* $(,)* => { $($exp:tt)* }
    ) => {
        macro_rules! body {
            $($exp)*
        }
        $(
            body! { $($arg)* }
        )*
    };
}
pub mod errors;
pub mod misc;
mod impls;
/**
This trait is used to perform a conversion that is permitted to approximate the result, but *not* to wrap or saturate the result to fit into the destination type's representable range.

# Details

Every implementation must be decomposable into two logical steps: an *approximation transform* followed by a *representation transform*.

The approximation transform maps the input to an approximately equivalent value supported by the target type, *without* taking the target type's representable range into account — for example, rounding or truncating a floating point value to an integer, or reducing the accuracy of a floating point value.

The representation transform then *exactly* rewrites that value from the source type's binary representation into the destination type's. This step *may not* transform the value in any way; if the result of the approximation is not representable, the conversion *must* fail.

The major reason for this formulation is to exactly define what happens when converting between floating point and integer types. A floating point value outside the representable range makes the conversion fail — it neither saturates nor wraps — which allows users to distinguish approximation from range violation and act accordingly.
*/
pub trait ApproxFrom<Src, Scheme = DefaultApprox>
where
    Self: Sized,
    Scheme: ApproxScheme,
{
    /// The error type produced by a failed conversion.
    type Err;
    /// Convert the given value into an approximately equivalent representation.
    fn approx_from(src: Src) -> Result<Self, Self::Err>;
}
// Identity conversion: approximating a value as its own type always succeeds
// and returns the value untouched, under every scheme.
impl<T, Scheme> ApproxFrom<T, Scheme> for T
where
    Scheme: ApproxScheme,
{
    type Err = NoError;
    fn approx_from(value: T) -> Result<T, NoError> {
        Ok(value)
    }
}
/**
This is the dual of `ApproxFrom`; see that trait for information.

A blanket implementation is provided for any `Dst: ApproxFrom<Src, Scheme>`,
so this trait normally does not need to be implemented directly.
*/
pub trait ApproxInto<Dst, Scheme=DefaultApprox> where Scheme: ApproxScheme {
    /// The error type produced by a failed conversion.
    type Err;
    /// Convert the subject into an approximately equivalent representation.
    fn approx_into(self) -> Result<Dst, Self::Err>;
}
impl<Dst, Src, Scheme> ApproxInto<Dst, Scheme> for Src
where
Dst: ApproxFrom<Src, Scheme>,
Scheme: ApproxScheme,
{
type Err = Dst::Err;
fn approx_into(self) -> Result<Dst, Self::Err> {
ApproxFrom::approx_from(self)
}
}
/**
This trait is used to mark approximation scheme types.

Schemes are uninhabited (zero-variant) enums: they act purely as type-level
markers that select a conversion behaviour, and can never be instantiated.
*/
pub trait ApproxScheme {}
/**
The "default" approximation scheme. This scheme does whatever would generally be expected of a lossy conversion, assuming no additional context or instruction is given.

This is a double-edged sword: it has the loosest semantics, but is far more likely to exist than more complicated approximation schemes.
*/
pub enum DefaultApprox {}
impl ApproxScheme for DefaultApprox {}
/**
This scheme is used to convert a value by "wrapping" it into a narrower range.

In abstract, this can be viewed as the opposite of rounding: rather than preserving the most significant bits of a value, it preserves the *least* significant bits of a value.
*/
pub enum Wrapping {}
impl ApproxScheme for Wrapping {}
/**
This scheme is used to convert a value by rounding it to the nearest representable value, with ties rounding away from zero.
*/
pub enum RoundToNearest {}
impl ApproxScheme for RoundToNearest {}
/**
This scheme is used to convert a value by rounding it toward negative infinity to the nearest representable value.
*/
pub enum RoundToNegInf {}
impl ApproxScheme for RoundToNegInf {}
/**
This scheme is used to convert a value by rounding it toward positive infinity to the nearest representable value.
*/
pub enum RoundToPosInf {}
impl ApproxScheme for RoundToPosInf {}
/**
This scheme is used to convert a value by rounding it toward zero to the nearest representable value.
*/
pub enum RoundToZero {}
impl ApproxScheme for RoundToZero {}
/**
This trait is used to perform a conversion between different semantic types which might fail.

# Details

Typically, this should be used in cases where you are converting between values whose ranges and/or representations only partially overlap. That the conversion may fail should be a reasonably expected outcome. A standard example of this is converting from integers to enums of unitary variants.
*/
pub trait TryFrom<Src>
where
    Self: Sized,
{
    /// The error type produced by a failed conversion.
    type Err;
    /// Convert the given value into the subject type.
    fn try_from(src: Src) -> Result<Self, Self::Err>;
}
impl<Src> TryFrom<Src> for Src {
type Err = NoError;
fn try_from(src: Src) -> Result<Self, Self::Err> {
Ok(src)
}
}
/**
This is the dual of `TryFrom`; see that trait for information.

A blanket implementation is provided for any `Dst: TryFrom<Src>`, so this
trait normally does not need to be implemented directly.
*/
pub trait TryInto<Dst> {
    /// The error type produced by a failed conversion.
    type Err;
    /// Convert the subject into the destination type.
    fn try_into(self) -> Result<Dst, Self::Err>;
}
// Blanket impl: every `TryFrom` implementation provides the matching `TryInto`.
impl<Src, Dst> TryInto<Dst> for Src
where
    Dst: TryFrom<Src>,
{
    type Err = Dst::Err;
    fn try_into(self) -> Result<Dst, Self::Err> {
        Dst::try_from(self)
    }
}
/**
This trait is used to perform an exact, value-preserving conversion.

# Details

Implementations of this trait should be reflexive, associative and commutative (in the absence of conversion errors). That is, all possible cycles of `ValueFrom` conversions (for which each "step" has a defined implementation) should produce the same result, with a given value either being "round-tripped" exactly, or an error being produced.
*/
pub trait ValueFrom<Src>
where
    Self: Sized,
{
    /// The error type produced by a failed conversion.
    type Err;
    /// Convert the given value into an exactly equivalent representation.
    fn value_from(src: Src) -> Result<Self, Self::Err>;
}
// Identity conversion: a value is always exactly representable as itself.
impl<T> ValueFrom<T> for T {
    type Err = NoError;
    fn value_from(value: T) -> Result<T, NoError> {
        Ok(value)
    }
}
/**
This is the dual of `ValueFrom`; see that trait for information.

A blanket implementation is provided for any `Dst: ValueFrom<Src>`, so this
trait normally does not need to be implemented directly.
*/
pub trait ValueInto<Dst> {
    /// The error type produced by a failed conversion.
    type Err;
    /// Convert the subject into an exactly equivalent representation.
    fn value_into(self) -> Result<Dst, Self::Err>;
}
impl<Src, Dst> ValueInto<Dst> for Src where Dst: ValueFrom<Src> {
type Err = Dst::Err;
fn value_into(self) -> Result<Dst, Self::Err> {
ValueFrom::value_from(self)
}
}
/**
This extension trait exists to simplify using various conversions.

If there is more than one implementation for a given type/trait pair, a simple call to `*_into` may not be uniquely resolvable. Due to the position of the type parameter (on the trait itself), it is cumbersome to specify the destination type. A similar problem exists for approximation schemes.

See also the [`ConvAsUtil`](./trait.ConvAsUtil.html) trait.

> **Note**: There appears to be a bug in `rustdoc`'s output. This trait is implemented *for all* types, though the methods are only available for types where the appropriate conversions are defined.
*/
pub trait ConvUtil {
    /// Approximate the subject to a given type with the default scheme.
    ///
    /// `Dst` is named at the call site (e.g. `x.approx_as::<u8>()`), which is
    /// what makes this more convenient than calling `approx_into` directly.
    fn approx_as<Dst>(self) -> Result<Dst, Self::Err>
    where Self: Sized + ApproxInto<Dst> {
        self.approx_into()
    }
    /// Approximate the subject to a given type with a specific scheme.
    fn approx_as_by<Dst, Scheme>(self) -> Result<Dst, Self::Err>
    where
        Self: Sized + ApproxInto<Dst, Scheme>,
        Scheme: ApproxScheme,
    {
        self.approx_into()
    }
    /// Attempt to convert the subject to a given type.
    fn try_as<Dst>(self) -> Result<Dst, Self::Err>
    where Self: Sized + TryInto<Dst> {
        self.try_into()
    }
    /// Attempt a value conversion of the subject to a given type.
    fn value_as<Dst>(self) -> Result<Dst, Self::Err>
    where Self: Sized + ValueInto<Dst> {
        self.value_into()
    }
}
// Blanket impl: every type gets the helper methods; usability of each method
// is gated by its own `where` clause.
impl<T> ConvUtil for T {}
/**
This extension trait exists to simplify using various conversions.

If there is more than one `ApproxFrom` implementation for a given type, a simple call to `approx_into` may not be uniquely resolvable. Due to the position of the scheme parameter (on the trait itself), it is cumbersome to specify which scheme you wanted.

The destination type is inferred from context.

See also the [`ConvUtil`](./trait.ConvUtil.html) trait.

> **Note**: There appears to be a bug in `rustdoc`'s output. This trait is implemented *for all* types, though the methods are only available for types where the appropriate conversions are defined.
*/
pub trait ConvAsUtil<Dst> {
    /// Approximate the subject with the default scheme.
    fn approx(self) -> Result<Dst, Self::Err>
    where Self: Sized + ApproxInto<Dst> {
        self.approx_into()
    }
    /// Approximate the subject with a specific scheme.
    ///
    /// The scheme is named at the call site (e.g. `x.approx_by::<Wrapping>()`),
    /// while `Dst` is inferred from the surrounding context.
    fn approx_by<Scheme>(self) -> Result<Dst, Self::Err>
    where
        Self: Sized + ApproxInto<Dst, Scheme>,
        Scheme: ApproxScheme,
    {
        self.approx_into()
    }
}
// Blanket impl: every type gets the helper methods; usability of each method
// is gated by its own `where` clause.
impl<T, Dst> ConvAsUtil<Dst> for T {}
|
//! An outline of how linear types could be combined with JS-managed data
//!
//! The goals are:
//!
//! 1. Ensure that JS objects are only accessed in the right JS compartment.
//! 2. Support stack-allocated roots.
//! 3. Remove the need for the rooting lint.
//! 4. Don't require rooting in code that can't perform GC.
//! 5. Allow `&mut T` access to JS-managed data, so we don't need as much interior mutability.
//!
//! # JS-managed data
//!
//! The idea is that Rust data can be given to JS to manage, and then accessed,
//! using the JS context. This is passed as a variable of type `JSContext<S>`,
//! where the type parameter `S` is used to track the state of the context.
//!
//! For example, we can give JS some Rust data to manage in compartment
//! `C` when the context state implements the `CanAlloc<C>` trait:
//!
//! ```rust
//! # use linjs::*;
//! fn example<C, S>(cx: &mut JSContext<S>) where
//! S: CanAlloc<C>,
//! {
//! let x: JSManaged<C, String> = cx.manage(String::from("hello"));
//! }
//! ```
//!
//! JS-managed data in compartment `C` can be accessed if the context state
//! implements the `CanAccess<C>` trait:
//!
//! ```rust
//! # use linjs::*;
//! fn example<C, S>(cx: &mut JSContext<S>, x: JSManaged<C, String>) where
//! S: CanAccess<C>,
//! {
//! println!("{} world", x.get(cx));
//! }
//! ```
//!
//! # Lifetimes of JS-managed data
//!
//! Unfortunately, combining these two examples is not memory-safe, due to
//! garbage collection:
//!
//! ```rust,ignore
//! # use linjs::*;
//! fn unsafe_example<C, S>(cx: &mut JSContext<S>) where
//! S: CanAlloc<C> + CanAccess<C>,
//! {
//! let x: JSManaged<C, String> = cx.manage(String::from("hello"));
//! // Imagine something triggers GC here
//! println!("{} world", x.get(cx));
//! }
//! ```
//!
//! This example is not safe, as there is nothing keeping `x` alive in JS,
//! so if garbage collection is triggered, then `x` will be reclaimed
//! which will drop the Rust data, and so the call to `x.get(cx)` will be a use-after-free.
//!
//! This example is not memory-safe, and fortunately fails to typecheck:
//!
//! ```text
//! error[E0502]: cannot borrow `*cx` as immutable because it is also borrowed as mutable
//! --> <anon>:7:32
//! |
//! 5 | let x: JSManaged<C, String> = cx.manage(String::from("hello"));
//! | -- mutable borrow occurs here
//! 6 | // Imagine something triggers GC here
//! 7 | println!("{} world", x.get(cx));
//! | ^^ immutable borrow occurs here
//! 8 | }
//! | - mutable borrow ends here
//! ```
//!
//! To see why this example fails to typecheck, we can introduce explicit lifetimes:
//!
//! ```rust,ignore
//! # use linjs::*;
//! fn unsafe_example<'a, C, S>(cx: &'a mut JSContext<S>) where
//! S: CanAlloc<C> + CanAccess<C>,
//! {
//! // x has type JSManaged<'b, C, String>
//! let x = cx.manage(String::from("hello"));
//! // Imagine something triggers GC here
//! // x_ref has type &'c String
//! let x_ref = x.get(cx);
//! println!("{} world", x_ref);
//! }
//! ```
//!
//! We can now see why this fails to typecheck: since `cx` is borrowed mutably at type
//! `&'b mut JSContext<S>`, then immutably at type `&'c JSContext<S>`, these lifetimes
//! cannot overlap, but the call to `x.get(cx)` requires them to overlap. These contradicting
//! constraints cause the example to fail to compile.
//!
//! # Rooting
//!
//! To fix this example, we need to make sure that `x` lives long enough. One way to do this is
//! to root `x`, so that it will not be garbage collected.
//!
//! ```rust
//! # use linjs::*;
//! fn example<'a, C: 'a, S>(cx: &'a mut JSContext<S>) where
//! S: CanAlloc<C> + CanAccess<C> + CanRoot,
//! {
//! // Function body has lifetime 'b
//! // x has type JSManaged<'b, C, String>
//! rooted!(in(cx) let x = cx.manage(String::from("hello")));
//! // Imagine something triggers GC here
//! // x_ref has type &'c String
//! let x_ref = x.get(cx);
//! println!("{} world", x_ref);
//! }
//! ```
//!
//! This example is now safe, since `x` is rooted during its access.
//! The example typechecks because the root has lifetime `'b`, and there is
//! no constraint that `'b` and `'c` don't overlap.
//! This use of lifetimes allows safe access to JS-managed data without a special
//! rooting lint.
//!
//! # JS-manageable data
//!
//! JS-managed lifetimes are variant, so can be converted to a more constrained
//! lifetime, for example if `'b` is a sublifetime of `'a`:
//!
//! ```rust
//! # use linjs::*;
//! type JSHandle<'a, C, T> = JSManaged<'a, C, JSManaged<'a, C, T>>;
//! fn example<'a, 'b, C>(handle: JSHandle<'a, C, String>) -> JSHandle<'b, C, String> where
//! 'a: 'b,
//! {
//! handle
//! }
//! ```
//! This use of variance is fine for monomorphic code, but sometimes polymorphic code
//! needs to change the lifetime of a type parameter. For this reason, we provide
//! a `JSManageable` trait. If `T: JSManageable<'a, C>` then `T` is a type whose lifetime
//! can be changed to `'a`. For example:
//!
//! ```rust
//! # use linjs::*;
//! type JSHandle<'a, C, T> = JSManaged<'a, C, JSManaged<'a, C, T>>;
//! fn example<'a, 'b, C, T>(handle: JSHandle<'a, C, T>) -> JSHandle<'b, C, T::Aged> where
//! 'a: 'b,
//! C: 'b,
//! T: JSManageable<'b, C>,
//! {
//! handle.contract_lifetime()
//! }
//! ```
//!
//! This trait can be derived, using the `#[derive(JSManageable)]` attribute.
//!
//! # Mutating JS-managed data
//!
//! JS managed data can be accessed mutably as well as immutably.
//! This is safe because mutably accessing JS-managed data requires
//! mutably borrowing the JS context, so there cannot be two simultaneous
//! mutable accesses.
//!
//! ```rust
//! # use linjs::*;
//! fn example<C, S>(cx: &mut JSContext<S>, x: JSManaged<C, String>) where
//! S: CanAccess<C>,
//! {
//! println!("{} world", x.get(cx));
//! *x.get_mut(cx) = String::from("hi");
//! println!("{} world", x.get(cx));
//! }
//! ```
//!
//! An attempt to mutably access JS managed data more than once simultaneously
//! results in an error from the borrow-checker, for example:
//!
//! ```rust,ignore
//! # use linjs::*; use std::mem;
//! fn unsafe_example<C, S>(cx: &mut JSContext<S>, x: JSManaged<C, String>, y: JSManaged<C, String>) where
//! S: CanAccess<C>,
//! {
//! mem::swap(x.get_mut(cx), y.get_mut(cx));
//! }
//! ```
//!
//! ```text
//! error[E0499]: cannot borrow `*cx` as mutable more than once at a time
//! --> <anon>:7:40
//! |
//! 7 | mem::swap(x.get_mut(cx), y.get_mut(cx));
//! | -- ^^ - first borrow ends here
//! | | |
//! | | second mutable borrow occurs here
//! | first mutable borrow occurs here
//! ```
//!
//! Mutable update allows the construction of cyclic structures, for example:
//!
//! ```rust
//! # #[macro_use] extern crate linjs;
//! # #[macro_use] extern crate linjs_derive;
//! # use linjs::*;
//! #[derive(JSManageable)]
//! struct NativeLoop<'a, C> {
//! next: Option<Loop<'a, C>>,
//! }
//! type Loop<'a, C> = JSManaged<'a, C, NativeLoop<'a, C>>;
//! fn example<C, S>(cx: &mut JSContext<S>) where
//! S: CanAccess<C> + CanAlloc<C> + CanRoot,
//! {
//! rooted!(in(cx) let l = cx.manage(NativeLoop { next: None }));
//! l.get_mut(cx).next = Some(l);
//! }
//! # fn main() {}
//! ```
//!
//! # Snapshots
//!
//! Some cases of building JS managed data require rooting, but in some cases
//! the rooting can be avoided, since the program does nothing to trigger
//! garbage collection. In this case, we can snapshot the JS context after
//! performing allocation. The snapshot supports accessing JS managed data,
//! but does not support any calls that might trigger garbage collection.
//! As a result, we know that any data which is live at the beginning of
//! the snapshot is also live at the end.
//!
//! ```rust
//! # #[macro_use] extern crate linjs;
//! # #[macro_use] extern crate linjs_derive;
//! # use linjs::*;
//! # #[derive(JSManageable)]
//! # struct NativeLoop<'a, C> {
//! # next: Option<Loop<'a, C>>,
//! # }
//! # type Loop<'a, C> = JSManaged<'a, C, NativeLoop<'a, C>>;
//! fn example<C, S>(cx: &mut JSContext<S>) where
//! S: CanAccess<C> + CanAlloc<C>
//! {
//! let (ref mut cx, l) = cx.snapshot_manage(NativeLoop { next: None });
//! l.get_mut(cx).next = Some(l);
//! }
//! # fn main() {}
//! ```
//!
//! A program which tries to use a function which might trigger GC will
//! not typecheck, as the snapshotted JS context state does not support
//! the appropriate traits. For example:
//!
//! ```rust,ignore
//! # #[macro_use] extern crate linjs;
//! # #[macro_use] extern crate linjs_derive;
//! # use linjs::*;
//! # #[derive(JSManageable)]
//! # struct NativeLoop<'a, C> {
//! # next: Option<Loop<'a, C>>,
//! # }
//! # type Loop<'a, C> = JSManaged<'a, C, NativeLoop<'a, C>>;
//! fn might_trigger_gc<C, S>(cx: &mut JSContext<S>) where
//! S: CanAccess<C> + CanAlloc<C>
//! { }
//!
//! fn unsafe_example<C, S>(cx: &mut JSContext<S>) where
//! S: CanAccess<C> + CanAlloc<C>
//! {
//! let (ref mut cx, l) = cx.snapshot_manage(NativeLoop { next: None });
//! might_trigger_gc(cx);
//! l.get_mut(cx).next = Some(l);
//! }
//! # fn main() {}
//! ```
//!
//! In this program, the function `might_trigger_gc` requires the state
//! to support `CanAlloc<C>`, which is not allowed by the snapshotted state.
//!
//! ```text
//! error[E0277]: the trait bound `linjs::Snapshotted<'_, S>: linjs::CanAlloc<C>` is not satisfied
//! --> <anon>:16:4
//! |
//! 16 | might_trigger_gc(cx);
//! | ^^^^^^^^^^^^^^^^ the trait `linjs::CanAlloc<C>` is not implemented for `linjs::Snapshotted<'_, S>`
//! |
//! = note: required by `might_trigger_gc`
//! ```
//!
//! # Globals
//!
//! JS contexts require initialization. In particular, each compartment has a global,
//! which should be JS managed data. The global can be initialized using `cx.init(value)`,
//! which updates the state of the context from uninitialized to initialized.
//!
//! ```rust
//! # #[macro_use] extern crate linjs;
//! # #[macro_use] extern crate linjs_derive;
//! # use linjs::*;
//! #[derive(JSManageable)]
//! struct NativeMyGlobal { name: String }
//! type MyGlobal<'a, C> = JSManaged<'a, C, NativeMyGlobal>;
//! type MyContext<'a, C> = JSContext<Initialized<MyGlobal<'a, C>>>;
//!
//! fn example<'a, C, S>(cx: JSContext<S>) -> MyContext<'a, C> where
//! C: 'a,
//! S: CanInitialize<C>,
//! {
//! let name = String::from("Alice");
//! cx.init(NativeMyGlobal { name: name })
//! }
//! # fn main() {}
//! ```
//!
//! The current global can be accessed from the JS context, for example:
//!
//! ```rust
//! # #[macro_use] extern crate linjs;
//! # #[macro_use] extern crate linjs_derive;
//! # use linjs::*;
//! #[derive(JSManageable)]
//! # struct NativeMyGlobal { name: String }
//! # type MyGlobal<'a, C> = JSManaged<'a, C, NativeMyGlobal>;
//! # type MyContext<'a, C> = JSContext<Initialized<MyGlobal<'a, C>>>;
//! #
//! fn example<'a, C, S>(cx: &JSContext<S>) where
//! S: HasGlobal<MyGlobal<'a, C>> + CanAccess<C>,
//! {
//! println!("My global is named {}.", cx.global().get(cx).name);
//! }
//! # fn main() {}
//! ```
//!
//! In some cases, the global contains some JS-managed data, in which case the initialization
//! is split into two steps: pre-initialization creates the global, and post-initialization
//! provides the JS-managed data for the global, for example:
//!
//! ```rust
//! # #[macro_use] extern crate linjs;
//! # #[macro_use] extern crate linjs_derive;
//! # use linjs::*;
//! #[derive(JSManageable)]
//! struct NativeMyGlobal<'a, C> { name: JSManaged<'a, C, String> }
//! type MyGlobal<'a, C> = JSManaged<'a, C, NativeMyGlobal<'a, C>>;
//! type MyContext<'a, C> = JSContext<Initialized<MyGlobal<'a, C>>>;
//!
//! fn example<'a, C, S>(cx: JSContext<S>) -> MyContext<'a, C> where
//! C: 'a,
//! S: CanInitialize<C>,
//! {
//! let mut cx = cx.pre_init();
//! rooted!(in(cx) let name = cx.manage(String::from("Alice")));
//! cx.post_init(NativeMyGlobal { name: name })
//! }
//! # fn main() {}
//! ```
//!
//! During initialization, it is safe to perform allocation, but
//! not much else, as the global is still uninitialized.
//! For example:
//!
//! ```rust,ignore
//! # #[macro_use] extern crate linjs;
//! # #[macro_use] extern crate linjs_derive;
//! # use linjs::*;
//! # #[derive(JSManageable)]
//! # struct NativeMyGlobal<'a, C> { name: JSManaged<'a, C, String> }
//! # type MyGlobal<'a, C> = JSManaged<'a, C, NativeMyGlobal<'a, C>>;
//! # type MyContext<'a, C> = JSContext<Initialized<MyGlobal<'a, C>>>;
//! #
//! fn unsafe_example<'a, C, S>(cx: JSContext<S>) -> MyContext<'a, C> where
//! C: 'a,
//! S: CanInitialize<C>,
//! {
//! let mut cx = cx.pre_init();
//! let oops = cx.global().get(&cx).name.get(&cx);
//! rooted!(in(cx) let name = cx.manage(String::from("Alice")));
//! cx.post_init(NativeMyGlobal { name: name })
//! }
//! # fn main() {}
//! ```
//!
//! This code is unsafe, since the global is accessed before it is initialized,
//! but does not typecheck because the context state does not allow accessing
//! JS-managed data during initialization.
//!
//! ```text
//! error[E0277]: the trait bound `linjs::Initializing<linjs::JSManaged<'_, C, _>>: linjs::CanAccess<C>` is not satisfied
//! --> <anon>:14:27
//! |
//! 14 | let oops = cx.global().get(&cx).name.get(&cx);
//! | ^^^ the trait `linjs::CanAccess<C>` is not implemented for `linjs::Initializing<linjs::JSManaged<'_, C, _>>`
//! ```
//!
//! # Bootstrapping
//!
//! To bootstrap initialization, a user defines a type which implements the `JSRunnable` trait.
//! This requires a `run` method, which takes the JS context as an argument. The `JSRunnable`
//! trait provides a `start()` method which calls the `run(cx)` method back with an appropriate
//! context.
//!
//! ```rust
//! # extern crate linjs;
//! # #[macro_use] extern crate linjs_derive;
//! # use linjs::*;
//! #[derive(JSManageable)]
//! struct NativeMyGlobal { name: String }
//!
//! struct Example;
//!
//! impl JSRunnable for Example {
//! fn run<C, S>(self, cx: JSContext<S>) where
//! S: CanInitialize<C>,
//! {
//! let name = String::from("Alice"); println!("0");
//! let ref cx = cx.init(NativeMyGlobal { name: name }); println!("A");
//! assert_eq!(cx.global().get(cx).name, "Alice");
//! }
//! }
//!
//! fn main() { Example.start(); }
//! ```
//!
//! # Examples
//!
//! This is an example of building a two-node cyclic graph, which is the smallest
//! example that Rust would need `Rc` and `RefCell` for. Note that this builds
//! the graph with no need for rooting.
//!
//! ```
//! #[macro_use] extern crate linjs;
//! #[macro_use] extern crate linjs_derive;
//! use linjs::{CanAlloc, CanAccess, CanExtend, CanInitialize, CanRoot};
//! use linjs::{JSContext, JSManageable, JSManaged, JSRunnable, JSTraceable};
//!
//! // A graph type
//! type Graph<'a, C> = JSManaged<'a, C, NativeGraph<'a, C>>;
//! #[derive(JSManageable)]
//! struct NativeGraph<'a, C> {
//! nodes: Vec<Node<'a, C>>,
//! }
//!
//! // A node type
//! type Node<'a, C> = JSManaged<'a, C, NativeNode<'a, C>>;
//! #[derive(JSManageable)]
//! struct NativeNode<'a, C> {
//! data: usize,
//! edges: Vec<Node<'a, C>>,
//! }
//!
//! // Build a cyclic graph
//! struct Example;
//! impl JSRunnable for Example {
//! fn run<C, S>(self, cx: JSContext<S>) where
//! S: CanInitialize<C>
//! {
//! let ref mut cx = cx.init(NativeGraph { nodes: vec![] });
//! let graph = cx.global();
//! self.add_node1(cx, graph);
//! self.add_node2(cx, graph);
//! assert_eq!(graph.get(cx).nodes[0].get(cx).data, 1);
//! assert_eq!(graph.get(cx).nodes[1].get(cx).data, 2);
//! let ref mut cx = cx.snapshot();
//! self.add_edges(cx, graph);
//! assert_eq!(graph.get(cx).nodes[0].get(cx).edges[0].get(cx).data, 2);
//! assert_eq!(graph.get(cx).nodes[1].get(cx).edges[0].get(cx).data, 1);
//! }
//! }
//!
//! impl Example {
//! fn add_node1<S, C>(&self, cx: &mut JSContext<S>, graph: Graph<C>) where
//! S: CanAccess<C> + CanAlloc<C> + CanRoot
//! {
//! // Creating nodes does memory allocation, which may trigger GC,
//! // so we need to be careful about lifetimes while they are being added.
//! // Approach 1 is to root the node.
//! rooted!(in(cx) let node1 = cx.manage(NativeNode { data: 1, edges: vec![] }));
//! graph.get_mut(cx).nodes.push(node1);
//! }
//! fn add_node2<S, C>(&self, cx: &mut JSContext<S>, graph: Graph<C>) where
//! S: CanAccess<C> + CanAlloc<C>
//! {
//! // Approach 2 is to take a snapshot of the context right after allocation.
//! let (ref mut cx, node2) = cx.snapshot_manage(NativeNode { data: 2, edges: vec![] });
//! graph.get_mut(cx).nodes.push(node2.contract_lifetime());
//! }
//! fn add_edges<'a, S, C>(&self, cx: &mut JSContext<S>, graph: Graph<C>) where
//! C: 'a,
//! S: CanAccess<C> + CanExtend<'a, C>
//! {
//! // Note that there's no rooting here.
//! let node1 = graph.get(cx).nodes[0].extend_lifetime(cx);
//! let node2 = graph.get(cx).nodes[1].extend_lifetime(cx);
//! node1.get_mut(cx).edges.push(node2.contract_lifetime());
//! node2.get_mut(cx).edges.push(node1.contract_lifetime());
//! }
//! }
//!
//! fn main() { Example.start(); }
//! ```
#![feature(generic_param_attrs)]
#![feature(dropck_eyepatch)]
use std::marker::PhantomData;
use std::mem;
use std::ptr;
/// The type for JS contexts whose current state is `S`.
///
/// The state parameter `S` statically tracks what the context may currently
/// do (allocate, access, root, initialize, …) via the `Can*`/`Has*` traits
/// implemented for the state types below.
pub struct JSContext<S> {
    // The current state; only reachable through the trait impls on `S`.
    state: S,
}
/// A context state in an initialized compartment with global of type `G`.
pub struct Initialized<G> {
    // The compartment's global.
    global: G,
    // Pinned roots (presumably an intrusive linked list — `JSPinnedRoots`
    // is defined elsewhere in this file; confirm its representation there).
    roots: JSPinnedRoots,
}
/// A context state in snapshotted compartment in underlying state `S`,
/// which guarantees that no GC will happen during the lifetime `'a`.
pub struct Snapshotted<'a, S: 'a> (&'a mut S);
/// A context state in uninitialized compartment `C`.
pub struct Uninitialized<C> (PhantomData<C>);
/// A context state in the middle of initializing a compartment with global of type `G`.
pub struct Initializing<G> {
    // The global; its pointee is only written once `post_init` is called.
    global: G,
    // Pinned roots, as in `Initialized`.
    roots: JSPinnedRoots,
}
/// A marker trait for JS contexts that can access native state
///
/// Implemented for fully-initialized contexts in compartment `C`, and for
/// snapshots of any state that could itself access `C`.
pub trait CanAccess<C> {}
impl<'a, C, T> CanAccess<C> for Initialized<JSManaged<'a, C, T>> {}
impl<'a, C, S> CanAccess<C> for Snapshotted<'a, S> where S: CanAccess<C> {}
/// A marker trait for JS contexts that can extend the lifetime of objects
///
/// Only snapshots implement this: per the module docs, no GC happens during
/// the snapshot lifetime `'a`, so data live at its start stays live throughout.
pub trait CanExtend<'a, C> {}
impl<'a, C, S> CanExtend<'a, C> for Snapshotted<'a, S> where S: CanAccess<C> {}
/// A trait for JS contexts that can create roots
pub trait CanRoot {
    /// Take ownership of the pinned-root list.
    fn roots(self) -> JSPinnedRoots;
    /// Borrow the pinned-root list.
    fn roots_ref(&self) -> &JSPinnedRoots;
    /// Mutably borrow the pinned-root list.
    fn roots_mut(&mut self) -> &mut JSPinnedRoots;
}
// Fully-initialized contexts can root: expose the stored root list.
impl<G> CanRoot for Initialized<G> {
    fn roots(self) -> JSPinnedRoots {
        self.roots
    }
    fn roots_ref(&self) -> &JSPinnedRoots {
        &self.roots
    }
    fn roots_mut(&mut self) -> &mut JSPinnedRoots {
        &mut self.roots
    }
}
// Contexts that are mid-initialization can also root (needed so that
// allocations made between `pre_init` and `post_init` can be kept alive).
impl<G> CanRoot for Initializing<G> {
    fn roots(self) -> JSPinnedRoots {
        self.roots
    }
    fn roots_ref(&self) -> &JSPinnedRoots {
        &self.roots
    }
    fn roots_mut(&mut self) -> &mut JSPinnedRoots {
        &mut self.roots
    }
}
/// A marker trait for JS contexts that can allocate objects
///
/// Allocation is allowed both after and during initialization, but not in a
/// snapshot (which is exactly what makes snapshots GC-free).
pub trait CanAlloc<C> {}
impl<'a, C, T> CanAlloc<C> for Initialized<JSManaged<'a, C, T>> {}
impl<'a, C, T> CanAlloc<C> for Initializing<JSManaged<'a, C, T>> {}
/// A marker trait for JS contexts that can be initialized
pub trait CanInitialize<C> {}
impl<C> CanInitialize<C> for Uninitialized<C> {}
/// A marker trait for JS contexts that are in the middle of initializing
///
/// Combines rooting with access to the (not yet written) global, which is
/// what `post_init` needs to finish initialization.
pub trait IsInitializing<G>: CanRoot + HasGlobal<G> {}
impl<G: Clone> IsInitializing<G> for Initializing<G> {}
/// A trait for JS contexts that have a global
pub trait HasGlobal<G> {
    /// Return (a copy of) the global of this context.
    fn global(&self) -> G;
}
// Initialized contexts hand out a clone of their stored global.
impl<G> HasGlobal<G> for Initialized<G> where
    G: Clone,
{
    fn global(&self) -> G {
        self.global.clone()
    }
}
// Initializing contexts do too; note that accessing the global's *contents*
// is still forbidden here because `Initializing` does not implement `CanAccess`.
impl<G> HasGlobal<G> for Initializing<G> where
    G: Clone,
{
    fn global(&self) -> G {
        self.global.clone()
    }
}
// Snapshots delegate to the underlying state.
impl<'a, G, S> HasGlobal<G> for Snapshotted<'a, S> where
    S: HasGlobal<G>,
{
    fn global(&self) -> G {
        self.0.global()
    }
}
impl<S> JSContext<S> {
    /// Get a snapshot of the JS state.
    /// The snapshot only allows access to the methods that are guaranteed not to call GC,
    /// so we don't need to root JS-managed pointers during the lifetime of a snapshot.
    pub fn snapshot<'a>(&'a mut self) -> JSContext<Snapshotted<'a, S>> {
        JSContext {
            state: Snapshotted(&mut self.state),
        }
    }
    /// Give ownership of data to JS.
    /// This allocates JS heap, which may trigger GC.
    pub fn manage<'a, C, T>(&'a mut self, value: T) -> JSManaged<'a, C, T::Aged> where
        S: CanAlloc<C>,
        T: JSManageable<'a, C>
    {
        // The real thing would use a JS reflector to manage the space,
        // this just space-leaks
        JSManaged {
            raw: Box::into_raw(Box::new(value)) as *mut (),
            marker: PhantomData,
        }
    }
    /// Give ownership of data to JS.
    /// This allocates JS heap, which may trigger GC.
    /// Returns a snapshot together with the managed handle, so the caller can
    /// use the data without rooting it (no GC can run during the snapshot).
    pub fn snapshot_manage<'a, C, T>(&'a mut self, value: T) -> (JSContext<Snapshotted<'a, S>>, JSManaged<'a, C, T::Aged>) where
        S: CanAlloc<C>,
        T: JSManageable<'a, C>
    {
        // The real thing would use a JS reflector to manage the space,
        // this just space-leaks
        let managed = JSManaged {
            raw: Box::into_raw(Box::new(value)) as *mut (),
            marker: PhantomData,
        };
        let snapshot = JSContext {
            state: Snapshotted(&mut self.state),
        };
        (snapshot, managed)
    }
    /// Initialize a JS Context
    pub fn init<'a, C, T>(self, value: T) -> JSContext<Initialized<JSManaged<'a, C, T::Aged>>> where
        S: CanInitialize<C>,
        T: JSManageable<'a, C>,
    {
        self.pre_init().post_init(value)
    }
    /// Prepare a JS context for initialization
    pub fn pre_init<'a, C, T>(self) -> JSContext<Initializing<JSManaged<'a, C, T>>> where
        S: CanInitialize<C>,
    {
        // This is dangerous!
        // This is only safe because dereferencing this pointer is only done by user code
        // in possession of a context whose state is `CanAccess<C>`. The only way a user can
        // access such a context is by calling `post_init`, which initializes the raw pointer.
        // NOTE(review): `mem::uninitialized` is UB for many `T` and was deprecated in
        // later Rust in favour of `MaybeUninit`; this prototype predates that API.
        // TODO: check that `Drop` and GC tracing are safe.
        // TODO: check the performance of the safer version of this code, which stores an `Option<T>` rather than a `T`.
        let boxed: Box<T> = unsafe { Box::new(mem::uninitialized()) };
        let raw = Box::into_raw(boxed) as *mut ();
        let global = JSManaged {
            raw: raw,
            marker: PhantomData,
        };
        JSContext {
            state: Initializing {
                global: global,
                roots: JSPinnedRoots(ptr::null_mut()),
            }
        }
    }
    /// Finish initializing a JS Context
    pub fn post_init<'a, C, T>(self, value: T) -> JSContext<Initialized<JSManaged<'a, C, T::Aged>>> where
        S: IsInitializing<JSManaged<'a, C, T::Aged>>,
        T: JSManageable<'a, C>,
    {
        let global = self.state.global();
        let raw = global.raw as *mut T;
        // Store the global's value in place without reading the (uninitialized)
        // previous contents. The previous code used `mem::replace` followed by
        // `mem::forget`, which materializes a value out of uninitialized memory
        // before forgetting it; `ptr::write` writes the new value without
        // reading or dropping the old one.
        unsafe { ptr::write(raw, value) };
        JSContext {
            state: Initialized {
                global: global,
                roots: self.state.roots(),
            }
        }
    }
    /// Get the global of an initialized context.
    pub fn global<G>(&self) -> G where
        S: HasGlobal<G>,
        G: Clone,
    {
        self.state.global()
    }
    /// Create a new root.
    /// The root starts out unpinned; `JSRoot::pin` links it into this
    /// context's root list, and the `rooted!` macro does both steps.
    pub fn new_root<T>(&mut self) -> JSRoot<T> where
        S: CanRoot,
    {
        JSRoot {
            value: None,
            // The traceable pointer is only read while the root is pinned,
            // at which point `pin` has set it to point at `value`.
            pin: JSUntypedPinnedRoot {
                value: unsafe { mem::zeroed() },
                next: ptr::null_mut(),
                prev: ptr::null_mut(),
            },
            roots: self.state.roots_mut(),
        }
    }
    // A real implementation would also have JS methods such as those in jsapi.
}
/// This is a placeholder for the real JSTraceable trait
pub unsafe trait JSTraceable {
    // Erase the concrete type to a trait-object pointer, so an untyped root
    // can hold a pointer to any traceable value.
    fn as_ptr(&self) -> *const JSTraceable where Self: Sized {
        unsafe { mem::transmute(self as &JSTraceable) }
    }
    fn as_mut_ptr(&mut self) -> *mut JSTraceable where Self: Sized {
        unsafe { mem::transmute(self as &mut JSTraceable) }
    }
}
// Leaf types trace nothing; container types are traceable when their
// contents are.
unsafe impl JSTraceable for String {}
unsafe impl JSTraceable for usize {}
unsafe impl<T> JSTraceable for Option<T> where T: JSTraceable {}
unsafe impl<T> JSTraceable for Vec<T> where T: JSTraceable {}
// etc.
/// Change the JS-managed lifetime of a type.
/// The real thing would include a JS tracer.
pub unsafe trait JSManageable<'a, C> : JSTraceable {
    /// This type should have the same memory representation as `Self`.
    /// The only difference between `Self` and `Self::Aged`
    /// is that any `JSManaged<'b, C, T>` should be replaced by
    /// `JSManaged<'a, C, T::Aged>`.
    type Aged: 'a + JSManageable<'a, C, Aged=Self::Aged>;
    /// Reinterpret `self` at lifetime `'a`.
    /// Sound only because `Self` and `Self::Aged` share a representation;
    /// `transmute_copy` + `forget` acts as a by-value transmute that the
    /// compiler cannot size-check for generic `Self`.
    unsafe fn change_lifetime(self) -> Self::Aged where Self: Sized {
        let result = mem::transmute_copy(&self);
        mem::forget(self);
        result
    }
    unsafe fn change_lifetime_ref(&'a self) -> &'a Self::Aged {
        &*(self as *const Self as *const Self::Aged)
    }
    unsafe fn change_lifetime_mut(&'a mut self) -> &'a mut Self::Aged {
        &mut *(self as *mut Self as *mut Self::Aged)
    }
    // Contracting a lifetime (Self: 'a) is always safe, so these wrappers
    // expose the casts without `unsafe`.
    fn contract_lifetime(self) -> Self::Aged where Self: 'a + Sized {
        unsafe { self.change_lifetime() }
    }
    fn contract_lifetime_ref(&'a self) -> &'a Self::Aged where Self: 'a {
        unsafe { self.change_lifetime_ref() }
    }
    fn contract_lifetime_mut(&'a mut self) -> &'a mut Self::Aged where Self: 'a {
        unsafe { self.change_lifetime_mut() }
    }
}
// Types containing no `JSManaged` pointers age to themselves, so they can
// override `change_lifetime` with the identity function.
unsafe impl<'a, C> JSManageable<'a, C> for String {
    type Aged = String;
    unsafe fn change_lifetime(self) -> Self::Aged where Self: Sized {
        self
    }
}
unsafe impl<'a, C> JSManageable<'a, C> for usize {
    type Aged = usize;
    unsafe fn change_lifetime(self) -> Self::Aged where Self: Sized {
        self
    }
}
unsafe impl<'a, C, T> JSManageable<'a, C> for Vec<T> where T: JSManageable<'a, C> {
    type Aged = Vec<T::Aged>;
    unsafe fn change_lifetime(self) -> Self::Aged where Self: Sized {
        // `Vec<T>` and `Vec<T::Aged>` share a layout because `T` and
        // `T::Aged` do (the trait's contract).
        mem::transmute(self)
    }
}
// etc.
/// A user of a JS runtime implements `JSRunnable`.
pub trait JSRunnable: Sized {
    /// This callback is called with a fresh JS compartment type `C`.
    fn run<C, S>(self, cx: JSContext<S>) where S: CanInitialize<C>;
    /// To trigger the callback, call `rt.start()`.
    fn start(self) {
        // The compartment type is declared inside this function, so callers
        // can never name it: each `start` call sees its own unique `C`.
        struct JSCompartmentImpl;
        let cx = JSContext {
            state: Uninitialized(PhantomData),
        };
        self.run::<JSCompartmentImpl, Uninitialized<JSCompartmentImpl>>(cx);
    }
}
/// The type of JS-managed data in a JS compartment `C`, with lifetime `'a`.
///
/// If the user has access to a `JSManaged`, then the JS-managed
/// data is live for the given lifetime.
pub struct JSManaged<'a, C, T: ?Sized> {
    // JS reflector goes here
    // This raw pointer should really be of type `*mut T`, but that is invariant in T.
    // To make the type variant in T, we use a `*mut ()` instead.
    raw: *mut (),
    // Carries the lifetime, the compartment and the (erased) pointee type.
    marker: PhantomData<(&'a(), C, T)>,
}
impl<'a, C, T: ?Sized> Clone for JSManaged<'a, C, T> {
fn clone(&self) -> Self {
JSManaged {
raw: self.raw,
marker: self.marker,
}
}
}
// A `JSManaged` handle is freely copyable; copies alias the same JS data.
impl<'a, C, T: ?Sized> Copy for JSManaged<'a, C, T> {
}
unsafe impl<'a, C, T: ?Sized> JSTraceable for JSManaged<'a, C, T> where
    T: JSTraceable
{}
// Aging a handle just re-ages its pointee type; the representation (one raw
// pointer) is unchanged, so the default `change_lifetime` is sound here.
unsafe impl<'a, 'b, C: 'b, T: ?Sized> JSManageable<'b, C> for JSManaged<'a, C, T> where
    T: JSManageable<'b, C>,
{
    type Aged = JSManaged<'b, C, T::Aged>;
}
impl<'a, C, T: ?Sized> JSManaged<'a, C, T> {
    /// Read-only access to JS-managed data.
    /// Borrowing the context immutably for `'b` means nothing that can
    /// trigger GC may run during the borrow.
    pub fn get<'b, S>(self, _: &'b JSContext<S>) -> &'b T::Aged where
        S: CanAccess<C>,
        T: JSManageable<'b, C>,
        'a: 'b,
    {
        unsafe { &*(self.raw as *mut T::Aged) }
    }
    /// Read-write access to JS-managed data.
    /// Mutably borrowing the context rules out any other simultaneous access.
    pub fn get_mut<'b, S>(self, _: &'b mut JSContext<S>) -> &'b mut T::Aged where
        S: CanAccess<C>,
        T: JSManageable<'b, C>,
        'a: 'b,
    {
        unsafe { &mut *(self.raw as *mut T::Aged) }
    }
    /// Change the lifetime of JS-managed data.
    /// Unsafe: the caller must guarantee the data is live for all of `'b`.
    pub unsafe fn change_lifetime<'b>(self) -> JSManaged<'b, C, T::Aged> where
        T: JSManageable<'b, C>,
    {
        JSManaged {
            raw: self.raw,
            marker: PhantomData,
        }
    }
    /// It's safe to contract the lifetime of JS-managed data.
    pub fn contract_lifetime<'b>(self) -> JSManaged<'b, C, T::Aged> where
        T: JSManageable<'b, C>,
        'a: 'b,
    {
        unsafe { self.change_lifetime() }
    }
    /// It's safe to extend the lifetime of JS-managed data if it has been snapshotted.
    /// No GC runs during the snapshot, so anything live now stays live for `'b`.
    pub fn extend_lifetime<'b, 'c, S>(self, _: &'c JSContext<S>) -> JSManaged<'b, C, T::Aged> where
        C: 'b,
        S: CanExtend<'b, C>,
        T: JSManageable<'b, C>,
        'b: 'a,
    {
        unsafe { self.change_lifetime() }
    }
}
/// A stack allocated root
pub struct JSRoot<T> {
    // The rooted value; `None` while the root is unpinned.
    value: Option<T>,
    // The intrusive list node that links this root into the context's roots.
    pin: JSUntypedPinnedRoot,
    // The context's pinned-root list that `pin`/`unpin` update.
    roots: *mut JSPinnedRoots,
}
/// A stack allocated root that has been pinned, so the backing store can't move.
pub struct JSPinnedRoot<'a, T:'a> (&'a mut JSRoot<T>);
/// A doubly linked list with all the pinned roots.
#[derive(Eq, PartialEq)]
pub struct JSPinnedRoots(*mut JSUntypedPinnedRoot);
/// A stack allocated root that has been pinned, but we don't have a type for the contents
struct JSUntypedPinnedRoot {
    // Type-erased pointer to the rooted value, for the GC to trace.
    value: *mut JSTraceable,
    next: *mut JSUntypedPinnedRoot,
    prev: *mut JSUntypedPinnedRoot,
}
impl<T> JSRoot<T> {
    // Very annoyingly, this function has to be marked as unsafe,
    // because we can't rely on the destructor for the pinned root running.
    // See the discussion about `mem::forget` being safe at
    // https://github.com/rust-lang/rfcs/pull/1066.
    // This is safe as long as it is unpinned before the memory
    // is reclaimed, but Rust does not enforce that.
    /// Store `value` in this root and link the root at the head of the
    /// context's pinned-root list.
    pub unsafe fn pin<'a, C, U>(&'a mut self, value: U) -> JSPinnedRoot<'a, T> where
        T: JSManageable<'a, C, Aged=T>,
        U: JSManageable<'a, C, Aged=T>,
    {
        self.value = Some(value.change_lifetime());
        // Point the untyped node at the stored value, then prepend the node
        // to the doubly linked list of pinned roots.
        self.pin.value = self.value.as_mut_ptr();
        self.pin.next = (*self.roots).0;
        self.pin.prev = ptr::null_mut();
        if let Some(next) = self.pin.next.as_mut() {
            next.prev = &mut self.pin;
        }
        *self.roots = JSPinnedRoots(&mut self.pin);
        JSPinnedRoot(self)
    }
    /// Unlink this root from the context's pinned-root list and clear it.
    pub unsafe fn unpin(&mut self) {
        // Splice the node out of the doubly linked list, fixing up the
        // list head if this node was at the front.
        if let Some(next) = self.pin.next.as_mut() {
            next.prev = self.pin.prev;
        }
        if let Some(prev) = self.pin.prev.as_mut() {
            prev.next = self.pin.next;
        }
        if *self.roots == JSPinnedRoots(&mut self.pin) {
            *self.roots = JSPinnedRoots(self.pin.next);
        }
        self.value = None;
        self.pin.value = mem::zeroed();
        self.pin.next = ptr::null_mut();
        self.pin.prev = ptr::null_mut();
    }
}
impl<'a, T> JSPinnedRoot<'a, T> {
    /// Copy the rooted value out (for `Copy` handles such as `JSManaged`).
    pub fn get<'b, C>(&'b self) -> T::Aged where
        T: JSManageable<'b, C>,
        T::Aged: Copy,
    {
        *self.get_ref()
    }
    /// Borrow the rooted value.
    pub fn get_ref<'b, C>(&'b self) -> &'b T::Aged where
        T: JSManageable<'b, C>,
    {
        // `value` is always `Some` while pinned: `pin` set it before linking.
        self.0.value.as_ref().unwrap().contract_lifetime_ref()
    }
    /// Mutably borrow the rooted value.
    pub fn get_mut<'b, C>(&'b mut self) -> &'b mut T::Aged where
        T: JSManageable<'b, C>,
    {
        self.0.value.as_mut().unwrap().contract_lifetime_mut()
    }
}
// Best-effort unpinning on scope exit; `mem::forget` can still skip this,
// which is why `JSRoot::pin` is unsafe.
impl<'a, T> Drop for JSPinnedRoot<'a, T> {
    fn drop(&mut self) {
        unsafe { self.0.unpin() }
    }
}
/// Root a value for the rest of the enclosing scope.
/// `rooted!(in(cx) let x = ...)` copies the rooted handle into `x`;
/// the `ref` / `ref mut` forms borrow the rooted value instead.
#[macro_export]
macro_rules! rooted {
    (in($cx:expr) let $name:ident = $init:expr) => (
        let mut __root = $cx.new_root();
        // Safe because the pin lives until the end of the enclosing scope.
        #[allow(unsafe_code)]
        let ref __pinned = unsafe { __root.pin($init) };
        let $name = __pinned.get();
    );
    (in($cx:expr) let mut $name:ident = $init:expr) => (
        let mut __root = $cx.new_root();
        #[allow(unsafe_code)]
        let ref __pinned = unsafe { __root.pin($init) };
        let mut $name = __pinned.get();
    );
    (in($cx:expr) let ref $name:ident = $init:expr) => (
        let mut __root = $cx.new_root();
        #[allow(unsafe_code)]
        let ref __pinned = unsafe { __root.pin($init) };
        let $name = __pinned.get_ref();
    );
    (in($cx:expr) let ref mut $name:ident = $init:expr) => (
        let mut __root = $cx.new_root();
        #[allow(unsafe_code)]
        let ref __pinned = unsafe { __root.pin($init) };
        let mut $name = __pinned.get_mut();
    )
}
// Linear types are a bit of a distraction.
//! An outline of how Rust's ownership, borrowing and lifetimes could be combined with JS-managed data
//!
//! The goals are:
//!
//! 1. Ensure that JS objects are only accessed in the right JS compartment.
//! 2. Support stack-allocated roots.
//! 3. Remove the need for the rooting lint.
//! 4. Don't require rooting in code that can't perform GC.
//! 5. Allow `&mut T` access to JS-managed data, so we don't need as much interior mutability.
//!
//! # JS-managed data
//!
//! The idea is that Rust data can be given to JS to manage, and then accessed,
//! using the JS context. This is passed as a variable of type `JSContext<S>`,
//! where the type parameter `S` is used to track the state of the context.
//!
//! For example, we can give JS some Rust data to manage in compartment
//! `C` when the context state implements the `CanAlloc<C>` trait:
//!
//! ```rust
//! # use linjs::*;
//! fn example<C, S>(cx: &mut JSContext<S>) where
//! S: CanAlloc<C>,
//! {
//! let x: JSManaged<C, String> = cx.manage(String::from("hello"));
//! }
//! ```
//!
//! JS-managed data in compartment `C` can be accessed if the context state
//! implements the `CanAccess<C>` trait:
//!
//! ```rust
//! # use linjs::*;
//! fn example<C, S>(cx: &mut JSContext<S>, x: JSManaged<C, String>) where
//! S: CanAccess<C>,
//! {
//! println!("{} world", x.get(cx));
//! }
//! ```
//!
//! # Lifetimes of JS-managed data
//!
//! Unfortunately, combining these two examples is not memory-safe, due to
//! garbage collection:
//!
//! ```rust,ignore
//! # use linjs::*;
//! fn unsafe_example<C, S>(cx: &mut JSContext<S>) where
//! S: CanAlloc<C> + CanAccess<C>,
//! {
//! let x: JSManaged<C, String> = cx.manage(String::from("hello"));
//! // Imagine something triggers GC here
//! println!("{} world", x.get(cx));
//! }
//! ```
//!
//! This example is not safe, as there is nothing keeping `x` alive in JS,
//! so if garbage collection is triggered, then `x` will be reclaimed
//! which will drop the Rust data, and so the call to `x.get(cx)` will be a use-after-free.
//!
//! This example is not memory-safe, and fortunately fails to typecheck:
//!
//! ```text
//! error[E0502]: cannot borrow `*cx` as immutable because it is also borrowed as mutable
//! --> <anon>:7:32
//! |
//! 5 | let x: JSManaged<C, String> = cx.manage(String::from("hello"));
//! | -- mutable borrow occurs here
//! 6 | // Imagine something triggers GC here
//! 7 | println!("{} world", x.get(cx));
//! | ^^ immutable borrow occurs here
//! 8 | }
//! | - mutable borrow ends here
//! ```
//!
//! To see why this example fails to typecheck, we can introduce explicit lifetimes:
//!
//! ```rust,ignore
//! # use linjs::*;
//! fn unsafe_example<'a, C, S>(cx: &'a mut JSContext<S>) where
//! S: CanAlloc<C> + CanAccess<C>,
//! {
//! // x has type JSManaged<'b, C, String>
//! let x = cx.manage(String::from("hello"));
//! // Imagine something triggers GC here
//! // x_ref has type &'c String
//! let x_ref = x.get(cx);
//! println!("{} world", x_ref);
//! }
//! ```
//!
//! We can now see why this fails to typecheck: since `cx` is first borrowed mutably at type
//! `&'b mut JSContext<S>`, then immutably at type `&'c JSContext<S>`, these lifetimes
//! cannot overlap, but the call to `x.get(cx)` requires them to overlap. These contradictory
//! constraints cause the example to fail to compile.
//!
//! # Rooting
//!
//! To fix this example, we need to make sure that `x` lives long enough. One way to do this is
//! to root `x`, so that it will not be garbage collected.
//!
//! ```rust
//! # use linjs::*;
//! fn example<'a, C: 'a, S>(cx: &'a mut JSContext<S>) where
//! S: CanAlloc<C> + CanAccess<C> + CanRoot,
//! {
//! // Function body has lifetime 'b
//! // x has type JSManaged<'b, C, String>
//! rooted!(in(cx) let x = cx.manage(String::from("hello")));
//! // Imagine something triggers GC here
//! // x_ref has type &'c String
//! let x_ref = x.get(cx);
//! println!("{} world", x_ref);
//! }
//! ```
//!
//! This example is now safe, since `x` is rooted during its access.
//! The example typechecks because the root has lifetime `'b`, and there is
//! no constraint that `'b` and `'c` don't overlap.
//! This use of lifetimes allows safe access to JS-managed data without a special
//! rooting lint.
//!
//! # JS-manageable data
//!
//! JS-managed lifetimes are variant, so can be converted to a more constrained
//! lifetime, for example if `'b` is a sublifetime of `'a`:
//!
//! ```rust
//! # use linjs::*;
//! type JSHandle<'a, C, T> = JSManaged<'a, C, JSManaged<'a, C, T>>;
//! fn example<'a, 'b, C>(handle: JSHandle<'a, C, String>) -> JSHandle<'b, C, String> where
//! 'a: 'b,
//! {
//! handle
//! }
//! ```
//! This use of variance is fine for monomorphic code, but sometimes polymorphic code
//! needs to change the lifetime of a type parameter. For this reason, we provide
//! a `JSManageable` trait. If `T: JSManageable<'a, C>` then `T` is a type whose lifetime
//! can be changed to `'a`. For example:
//!
//! ```rust
//! # use linjs::*;
//! type JSHandle<'a, C, T> = JSManaged<'a, C, JSManaged<'a, C, T>>;
//! fn example<'a, 'b, C, T>(handle: JSHandle<'a, C, T>) -> JSHandle<'b, C, T::Aged> where
//! 'a: 'b,
//! C: 'b,
//! T: JSManageable<'b, C>,
//! {
//! handle.contract_lifetime()
//! }
//! ```
//!
//! This trait can be derived, using the `#[derive(JSManageable)]` type annotation.
//!
//! # Mutating JS-managed data
//!
//! JS managed data can be accessed mutably as well as immutably.
//! This is safe because mutably accessing JS manage data requires
//! mutably borrowing the JS context, so there cannot be two simultaneous
//! mutable accesses.
//!
//! ```rust
//! # use linjs::*;
//! fn example<C, S>(cx: &mut JSContext<S>, x: JSManaged<C, String>) where
//! S: CanAccess<C>,
//! {
//! println!("{} world", x.get(cx));
//! *x.get_mut(cx) = String::from("hi");
//! println!("{} world", x.get(cx));
//! }
//! ```
//!
//! An attempt to mutably access JS managed data more than once simultaneously
//! results in an error from the borrow-checker, for example:
//!
//! ```rust,ignore
//! # use linjs::*; use std::mem;
//! fn unsafe_example<C, S>(cx: &mut JSContext<S>, x: JSManaged<C, String>, y: JSManaged<C, String>) where
//! S: CanAccess<C>,
//! {
//! mem::swap(x.get_mut(cx), y.get_mut(cx));
//! }
//! ```
//!
//! ```text
//! error[E0499]: cannot borrow `*cx` as mutable more than once at a time
//! --> <anon>:7:40
//! |
//! 7 | mem::swap(x.get_mut(cx), y.get_mut(cx));
//! | -- ^^ - first borrow ends here
//! | | |
//! | | second mutable borrow occurs here
//! | first mutable borrow occurs here
//! ```
//!
//! Mutable update allows the construction of cyclic structures, for example:
//!
//! ```rust
//! # #[macro_use] extern crate linjs;
//! # #[macro_use] extern crate linjs_derive;
//! # use linjs::*;
//! #[derive(JSManageable)]
//! struct NativeLoop<'a, C> {
//! next: Option<Loop<'a, C>>,
//! }
//! type Loop<'a, C> = JSManaged<'a, C, NativeLoop<'a, C>>;
//! fn example<C, S>(cx: &mut JSContext<S>) where
//! S: CanAccess<C> + CanAlloc<C> + CanRoot,
//! {
//! rooted!(in(cx) let l = cx.manage(NativeLoop { next: None }));
//! l.get_mut(cx).next = Some(l);
//! }
//! # fn main() {}
//! ```
//!
//! # Snapshots
//!
//! Some cases of building JS managed data require rooting, but in some cases
//! the rooting can be avoided, since the program does nothing to trigger
//! garbage collection. In this case, we can snapshot the JS context after
//! performing allocation. The snapshot supports accessing JS managed data,
//! but does not support any calls that might trigger garbage collection.
//! As a result, we know that any data which is live at the beginning of
//! the snapshot is also live at the end.
//!
//! ```rust
//! # #[macro_use] extern crate linjs;
//! # #[macro_use] extern crate linjs_derive;
//! # use linjs::*;
//! # #[derive(JSManageable)]
//! # struct NativeLoop<'a, C> {
//! # next: Option<Loop<'a, C>>,
//! # }
//! # type Loop<'a, C> = JSManaged<'a, C, NativeLoop<'a, C>>;
//! fn example<C, S>(cx: &mut JSContext<S>) where
//! S: CanAccess<C> + CanAlloc<C>
//! {
//! let (ref mut cx, l) = cx.snapshot_manage(NativeLoop { next: None });
//! l.get_mut(cx).next = Some(l);
//! }
//! # fn main() {}
//! ```
//!
//! A program which tries to use a function which might trigger GC will
//! not typecheck, as the snapshotted JS context state does not support
//! the appropriate traits. For example:
//!
//! ```rust,ignore
//! # #[macro_use] extern crate linjs;
//! # #[macro_use] extern crate linjs_derive;
//! # use linjs::*;
//! # #[derive(JSManageable)]
//! # struct NativeLoop<'a, C> {
//! # next: Option<Loop<'a, C>>,
//! # }
//! # type Loop<'a, C> = JSManaged<'a, C, NativeLoop<'a, C>>;
//! fn might_trigger_gc<C, S>(cx: &mut JSContext<S>) where
//! S: CanAccess<C> + CanAlloc<C>
//! { }
//!
//! fn unsafe_example<C, S>(cx: &mut JSContext<S>) where
//! S: CanAccess<C> + CanAlloc<C>
//! {
//! let (ref mut cx, l) = cx.snapshot_manage(NativeLoop { next: None });
//! might_trigger_gc(cx);
//! l.get_mut(cx).next = Some(l);
//! }
//! # fn main() {}
//! ```
//!
//! In this program, the function `might_trigger_gc` requires the state
//! to support `CanAlloc<C>`, which is not allowed by the snapshotted state.
//!
//! ```text
//! error[E0277]: the trait bound `linjs::Snapshotted<'_, S>: linjs::CanAlloc<C>` is not satisfied
//! --> <anon>:16:4
//! |
//! 16 | might_trigger_gc(cx);
//! | ^^^^^^^^^^^^^^^^ the trait `linjs::CanAlloc<C>` is not implemented for `linjs::Snapshotted<'_, S>`
//! |
//! = note: required by `might_trigger_gc`
//! ```
//!
//! # Globals
//!
//! JS contexts require initialization. In particular, each compartment has a global,
//! which should be JS managed data. The global can be initialized using `cx.init(value)`,
//! which updates the state of the context from uninitialized to initialized.
//!
//! ```rust
//! # #[macro_use] extern crate linjs;
//! # #[macro_use] extern crate linjs_derive;
//! # use linjs::*;
//! #[derive(JSManageable)]
//! struct NativeMyGlobal { name: String }
//! type MyGlobal<'a, C> = JSManaged<'a, C, NativeMyGlobal>;
//! type MyContext<'a, C> = JSContext<Initialized<MyGlobal<'a, C>>>;
//!
//! fn example<'a, C, S>(cx: JSContext<S>) -> MyContext<'a, C> where
//! C: 'a,
//! S: CanInitialize<C>,
//! {
//! let name = String::from("Alice");
//! cx.init(NativeMyGlobal { name: name })
//! }
//! # fn main() {}
//! ```
//!
//! The current global can be accessed from the JS context, for example:
//!
//! ```rust
//! # #[macro_use] extern crate linjs;
//! # #[macro_use] extern crate linjs_derive;
//! # use linjs::*;
//! #[derive(JSManageable)]
//! # struct NativeMyGlobal { name: String }
//! # type MyGlobal<'a, C> = JSManaged<'a, C, NativeMyGlobal>;
//! # type MyContext<'a, C> = JSContext<Initialized<MyGlobal<'a, C>>>;
//! #
//! fn example<'a, C, S>(cx: &JSContext<S>) where
//! S: HasGlobal<MyGlobal<'a, C>> + CanAccess<C>,
//! {
//! println!("My global is named {}.", cx.global().get(cx).name);
//! }
//! # fn main() {}
//! ```
//!
//! In some cases, the global contains some JS-managed data, in which case the initialization
//! is split into two steps: pre-initialization creates the global, and post-initialization
//! provides the JS-managed data for the global, for example:
//!
//! ```rust
//! # #[macro_use] extern crate linjs;
//! # #[macro_use] extern crate linjs_derive;
//! # use linjs::*;
//! #[derive(JSManageable)]
//! struct NativeMyGlobal<'a, C> { name: JSManaged<'a, C, String> }
//! type MyGlobal<'a, C> = JSManaged<'a, C, NativeMyGlobal<'a, C>>;
//! type MyContext<'a, C> = JSContext<Initialized<MyGlobal<'a, C>>>;
//!
//! fn example<'a, C, S>(cx: JSContext<S>) -> MyContext<'a, C> where
//! C: 'a,
//! S: CanInitialize<C>,
//! {
//! let mut cx = cx.pre_init();
//! rooted!(in(cx) let name = cx.manage(String::from("Alice")));
//! cx.post_init(NativeMyGlobal { name: name })
//! }
//! # fn main() {}
//! ```
//!
//! During initialization, it is safe to perform allocation, but
//! not much else, as the global is still uninitialized.
//! For example:
//!
//! ```rust,ignore
//! # #[macro_use] extern crate linjs;
//! # #[macro_use] extern crate linjs_derive;
//! # use linjs::*;
//! # #[derive(JSManageable)]
//! # struct NativeMyGlobal<'a, C> { name: JSManaged<'a, C, String> }
//! # type MyGlobal<'a, C> = JSManaged<'a, C, NativeMyGlobal<'a, C>>;
//! # type MyContext<'a, C> = JSContext<Initialized<MyGlobal<'a, C>>>;
//! #
//! fn unsafe_example<'a, C, S>(cx: JSContext<S>) -> MyContext<'a, C> where
//! C: 'a,
//! S: CanInitialize<C>,
//! {
//! let mut cx = cx.pre_init();
//! let oops = cx.global().get(&cx).name.get(&cx);
//! rooted!(in(cx) let name = cx.manage(String::from("Alice")));
//! cx.post_init(NativeMyGlobal { name: name })
//! }
//! # fn main() {}
//! ```
//!
//! This code is unsafe, since the global is accessed before it is initialized,
//! but does not typecheck because the context state does not allow accessing
//! JS-managed data during initialization.
//!
//! ```text
//! error[E0277]: the trait bound `linjs::Initializing<linjs::JSManaged<'_, C, _>>: linjs::CanAccess<C>` is not satisfied
//! --> <anon>:14:27
//! |
//! 14 | let oops = cx.global().get(&cx).name.get(&cx);
//! | ^^^ the trait `linjs::CanAccess<C>` is not implemented for `linjs::Initializing<linjs::JSManaged<'_, C, _>>`
//! ```
//!
//! # Bootstrapping
//!
//! To bootstrap initialization, a user defines a type which implements the `JSRunnable` trait.
//! This requires a `run` method, which takes the JS context as an argument. The `JSRunnable`
//! trait provides a `start()` method which calls the `run(cx)` method back with an appropriate
//! context.
//!
//! ```rust
//! # extern crate linjs;
//! # #[macro_use] extern crate linjs_derive;
//! # use linjs::*;
//! #[derive(JSManageable)]
//! struct NativeMyGlobal { name: String }
//!
//! struct Example;
//!
//! impl JSRunnable for Example {
//! fn run<C, S>(self, cx: JSContext<S>) where
//! S: CanInitialize<C>,
//! {
//! let name = String::from("Alice"); println!("0");
//! let ref cx = cx.init(NativeMyGlobal { name: name }); println!("A");
//! assert_eq!(cx.global().get(cx).name, "Alice");
//! }
//! }
//!
//! fn main() { Example.start(); }
//! ```
//!
//! # Examples
//!
//! This is an example of building a two-node cyclic graph, which is the smallest
//! example that Rust would need `Rc` and `RefCell` for. Note that this builds
//! the graph with no need for rooting.
//!
//! ```
//! #[macro_use] extern crate linjs;
//! #[macro_use] extern crate linjs_derive;
//! use linjs::{CanAlloc, CanAccess, CanExtend, CanInitialize, CanRoot};
//! use linjs::{JSContext, JSManageable, JSManaged, JSRunnable, JSTraceable};
//!
//! // A graph type
//! type Graph<'a, C> = JSManaged<'a, C, NativeGraph<'a, C>>;
//! #[derive(JSManageable)]
//! struct NativeGraph<'a, C> {
//! nodes: Vec<Node<'a, C>>,
//! }
//!
//! // A node type
//! type Node<'a, C> = JSManaged<'a, C, NativeNode<'a, C>>;
//! #[derive(JSManageable)]
//! struct NativeNode<'a, C> {
//! data: usize,
//! edges: Vec<Node<'a, C>>,
//! }
//!
//! // Build a cyclic graph
//! struct Example;
//! impl JSRunnable for Example {
//! fn run<C, S>(self, cx: JSContext<S>) where
//! S: CanInitialize<C>
//! {
//! let ref mut cx = cx.init(NativeGraph { nodes: vec![] });
//! let graph = cx.global();
//! self.add_node1(cx, graph);
//! self.add_node2(cx, graph);
//! assert_eq!(graph.get(cx).nodes[0].get(cx).data, 1);
//! assert_eq!(graph.get(cx).nodes[1].get(cx).data, 2);
//! let ref mut cx = cx.snapshot();
//! self.add_edges(cx, graph);
//! assert_eq!(graph.get(cx).nodes[0].get(cx).edges[0].get(cx).data, 2);
//! assert_eq!(graph.get(cx).nodes[1].get(cx).edges[0].get(cx).data, 1);
//! }
//! }
//!
//! impl Example {
//! fn add_node1<S, C>(&self, cx: &mut JSContext<S>, graph: Graph<C>) where
//! S: CanAccess<C> + CanAlloc<C> + CanRoot
//! {
//! // Creating nodes does memory allocation, which may trigger GC,
//! // so we need to be careful about lifetimes while they are being added.
//! // Approach 1 is to root the node.
//! rooted!(in(cx) let node1 = cx.manage(NativeNode { data: 1, edges: vec![] }));
//! graph.get_mut(cx).nodes.push(node1);
//! }
//! fn add_node2<S, C>(&self, cx: &mut JSContext<S>, graph: Graph<C>) where
//! S: CanAccess<C> + CanAlloc<C>
//! {
//! // Approach 2 is to take a snapshot of the context right after allocation.
//! let (ref mut cx, node2) = cx.snapshot_manage(NativeNode { data: 2, edges: vec![] });
//! graph.get_mut(cx).nodes.push(node2.contract_lifetime());
//! }
//! fn add_edges<'a, S, C>(&self, cx: &mut JSContext<S>, graph: Graph<C>) where
//! C: 'a,
//! S: CanAccess<C> + CanExtend<'a, C>
//! {
//! // Note that there's no rooting here.
//! let node1 = graph.get(cx).nodes[0].extend_lifetime(cx);
//! let node2 = graph.get(cx).nodes[1].extend_lifetime(cx);
//! node1.get_mut(cx).edges.push(node2.contract_lifetime());
//! node2.get_mut(cx).edges.push(node1.contract_lifetime());
//! }
//! }
//!
//! fn main() { Example.start(); }
//! ```
#![feature(generic_param_attrs)]
#![feature(dropck_eyepatch)]
use std::marker::PhantomData;
use std::mem;
use std::ptr;
/// The type for JS contexts whose current state is `S`.
pub struct JSContext<S> {
    // The state value tracks initialization/snapshot status in the type `S`.
    state: S,
}
/// A context state in an initialized compartment with global of type `G`.
pub struct Initialized<G> {
    global: G,
    // Head of the doubly linked list of pinned stack roots.
    roots: JSPinnedRoots,
}
/// A context state in snapshotted compartment in underlying state `S`,
/// which guarantees that no GC will happen during the lifetime `'a`.
pub struct Snapshotted<'a, S: 'a> (&'a mut S);
/// A context state in uninitialized compartment `C`.
pub struct Uninitialized<C> (PhantomData<C>);
/// A context state in the middle of initializing a compartment with global of type `G`.
pub struct Initializing<G> {
    global: G,
    roots: JSPinnedRoots,
}
/// A marker trait for JS contexts that can access native state
pub trait CanAccess<C> {}
impl<'a, C, T> CanAccess<C> for Initialized<JSManaged<'a, C, T>> {}
impl<'a, C, S> CanAccess<C> for Snapshotted<'a, S> where S: CanAccess<C> {}
/// A marker trait for JS contexts that can extend the lifetime of objects
// Only snapshots implement this: no GC can run during the snapshot, so
// anything live at its start stays live for its whole lifetime `'a`.
pub trait CanExtend<'a, C> {}
impl<'a, C, S> CanExtend<'a, C> for Snapshotted<'a, S> where S: CanAccess<C> {}
/// A trait for JS contexts that can create roots
pub trait CanRoot {
    /// Take ownership of the root list.
    fn roots(self) -> JSPinnedRoots;
    fn roots_ref(&self) -> &JSPinnedRoots;
    fn roots_mut(&mut self) -> &mut JSPinnedRoots;
}
// Both the fully initialized state and the mid-initialization state own a
// root list, so both can create roots.
impl<G> CanRoot for Initialized<G> {
    fn roots(self) -> JSPinnedRoots {
        self.roots
    }
    fn roots_ref(&self) -> &JSPinnedRoots {
        &self.roots
    }
    fn roots_mut(&mut self) -> &mut JSPinnedRoots {
        &mut self.roots
    }
}
impl<G> CanRoot for Initializing<G> {
    fn roots(self) -> JSPinnedRoots {
        self.roots
    }
    fn roots_ref(&self) -> &JSPinnedRoots {
        &self.roots
    }
    fn roots_mut(&mut self) -> &mut JSPinnedRoots {
        &mut self.roots
    }
}
/// A marker trait for JS contexts that can allocate objects
pub trait CanAlloc<C> {}
// Allocation is allowed once the compartment has a global, and also while
// the global itself is still being initialized.
impl<'a, C, T> CanAlloc<C> for Initialized<JSManaged<'a, C, T>> {}
impl<'a, C, T> CanAlloc<C> for Initializing<JSManaged<'a, C, T>> {}
/// A marker trait for JS contexts that can be initialized
pub trait CanInitialize<C> {}
impl<C> CanInitialize<C> for Uninitialized<C> {}
/// A marker trait for JS contexts that are in the middle of initializing
pub trait IsInitializing<G>: CanRoot + HasGlobal<G> {}
impl<G: Clone> IsInitializing<G> for Initializing<G> {}
/// A trait for JS contexts that have a global
pub trait HasGlobal<G> {
    /// Returns (a clone of) the compartment's global.
    fn global(&self) -> G;
}
// `Initialized` and `Initializing` store the global directly;
// `Snapshotted` delegates to the underlying state it wraps.
impl<G> HasGlobal<G> for Initialized<G> where
    G: Clone,
{
    fn global(&self) -> G {
        self.global.clone()
    }
}
impl<G> HasGlobal<G> for Initializing<G> where
    G: Clone,
{
    fn global(&self) -> G {
        self.global.clone()
    }
}
impl<'a, G, S> HasGlobal<G> for Snapshotted<'a, S> where
    S: HasGlobal<G>,
{
    fn global(&self) -> G {
        self.0.global()
    }
}
impl<S> JSContext<S> {
/// Get a snapshot of the JS state.
/// The snapshot only allows access to the methods that are guaranteed not to call GC,
/// so we don't need to root JS-managed pointers during the lifetime of a snapshot.
pub fn snapshot<'a>(&'a mut self) -> JSContext<Snapshotted<'a, S>> {
JSContext {
state: Snapshotted(&mut self.state),
}
}
/// Give ownership of data to JS.
/// This allocates JS heap, which may trigger GC.
pub fn manage<'a, C, T>(&'a mut self, value: T) -> JSManaged<'a, C, T::Aged> where
S: CanAlloc<C>,
T: JSManageable<'a, C>
{
// The real thing would use a JS reflector to manage the space,
// this just space-leaks
JSManaged {
raw: Box::into_raw(Box::new(value)) as *mut (),
marker: PhantomData,
}
}
/// Give ownership of data to JS.
/// This allocates JS heap, which may trigger GC.
pub fn snapshot_manage<'a, C, T>(&'a mut self, value: T) -> (JSContext<Snapshotted<'a, S>>, JSManaged<'a, C, T::Aged>) where
S: CanAlloc<C>,
T: JSManageable<'a, C>
{
// The real thing would use a JS reflector to manage the space,
// this just space-leaks
let managed = JSManaged {
raw: Box::into_raw(Box::new(value)) as *mut (),
marker: PhantomData,
};
let snapshot = JSContext {
state: Snapshotted(&mut self.state),
};
(snapshot, managed)
}
/// Initialize a JS Context
pub fn init<'a, C, T>(self, value: T) -> JSContext<Initialized<JSManaged<'a, C, T::Aged>>> where
S: CanInitialize<C>,
T: JSManageable<'a, C>,
{
self.pre_init().post_init(value)
}
/// Prepare a JS context for initialization
///
/// Allocates *uninitialized* storage for the global; `post_init` must fill it
/// in before any `CanAccess` context can dereference it.
pub fn pre_init<'a, C, T>(self) -> JSContext<Initializing<JSManaged<'a, C, T>>> where
    S: CanInitialize<C>,
{
    // This is dangerous!
    // This is only safe because dereferencing this pointer is only done by user code
    // in posession of a context whose state is `CanAccess<C>`. The only way a user can
    // access such a context is by calling `post_init`, which initializes the raw pointer.
    // TODO: check that `Drop` and GC tracing are safe.
    // TODO: check the performance of the safer version of this code, which stores an `Option<T>` rather than a `T`.
    // NOTE(review): `mem::uninitialized` is deprecated and instant UB for many
    // types on modern rustc; consider `MaybeUninit` — TODO confirm toolchain.
    let boxed: Box<T> = unsafe { Box::new(mem::uninitialized()) };
    let raw = Box::into_raw(boxed) as *mut ();
    let global = JSManaged {
        raw: raw,
        marker: PhantomData,
    };
    JSContext {
        state: Initializing {
            global: global,
            // Empty pinned-root list to start with.
            roots: JSPinnedRoots(ptr::null_mut()),
        }
    }
}
/// Finish initializing a JS Context
///
/// Writes `value` into the uninitialized slot reserved by `pre_init` and
/// transitions the context state to `Initialized`.
pub fn post_init<'a, C, T>(self, value: T) -> JSContext<Initialized<JSManaged<'a, C, T::Aged>>> where
    S: IsInitializing<JSManaged<'a, C, T::Aged>>,
    T: JSManageable<'a, C>,
{
    let global = self.state.global();
    let raw = global.raw as *mut T;
    // Swap the new value into the heap slot; the value swapped *out* was never
    // initialized, so it must not be dropped.
    let uninitialized = unsafe { mem::replace(&mut *raw, value) };
    mem::forget(uninitialized);
    JSContext {
        state: Initialized {
            global: global,
            roots: self.state.roots(),
        }
    }
}
/// Get the global of an initialized context.
///
/// Delegates to the state; `HasGlobal<G>` guarantees one exists.
pub fn global<G>(&self) -> G where
    S: HasGlobal<G>,
    G: Clone,
{
    self.state.global()
}
/// Create a new root.
///
/// The root starts unpinned (empty `value`, unlinked list node); it only
/// becomes live when `JSRoot::pin` is called.
pub fn new_root<T>(&mut self) -> JSRoot<T> where
    S: CanRoot,
{
    JSRoot {
        value: None,
        pin: JSUntypedPinnedRoot {
            // Placeholder trait-object pointer, overwritten by `pin` before
            // the node is linked in.
            // NOTE(review): zeroing a fat `*mut JSTraceable` assumes the null
            // vtable is never used — confirm this is sound on this toolchain.
            value: unsafe { mem::zeroed() },
            next: ptr::null_mut(),
            prev: ptr::null_mut(),
        },
        roots: self.state.roots_mut(),
    }
}
// A real implementation would also have JS methods such as those in jsapi.
}
/// This is a placeholder for the real JSTraceable trait
pub unsafe trait JSTraceable {
    // Both helpers turn a borrow into a raw trait-object (fat) pointer via
    // `transmute`; `Self: Sized` keeps them off trait objects themselves.
    fn as_ptr(&self) -> *const JSTraceable where Self: Sized {
        unsafe { mem::transmute(self as &JSTraceable) }
    }
    fn as_mut_ptr(&mut self) -> *mut JSTraceable where Self: Sized {
        unsafe { mem::transmute(self as &mut JSTraceable) }
    }
}
// Placeholder trace impls: leaves (`String`, `usize`) trace nothing, and the
// container impls simply require their contents to be traceable.
unsafe impl JSTraceable for String {}
unsafe impl JSTraceable for usize {}
unsafe impl<T> JSTraceable for Option<T> where T: JSTraceable {}
unsafe impl<T> JSTraceable for Vec<T> where T: JSTraceable {}
// etc.
/// Change the JS-managed lifetime of a type.
/// The real thing would include a JS tracer.
pub unsafe trait JSManageable<'a, C> : JSTraceable {
    /// This type should have the same memory representation as `Self`.
    /// The only difference between `Self` and `Self::Aged`
    /// is that any `JSManaged<'b, C, T>` should be replaced by
    /// `JSManaged<'a, C, T::Aged>`.
    type Aged: 'a + JSManageable<'a, C, Aged=Self::Aged>;
    // Sound only because `Self` and `Self::Aged` are required to share a
    // memory representation (see `Aged` above).
    unsafe fn change_lifetime(self) -> Self::Aged where Self: Sized {
        let result = mem::transmute_copy(&self);
        // Ownership moved into `result`; don't run `self`'s destructor.
        mem::forget(self);
        result
    }
    unsafe fn change_lifetime_ref(&'a self) -> &'a Self::Aged {
        &*(self as *const Self as *const Self::Aged)
    }
    unsafe fn change_lifetime_mut(&'a mut self) -> &'a mut Self::Aged {
        &mut *(self as *mut Self as *mut Self::Aged)
    }
    // The `contract_*` variants are safe: shrinking a lifetime can never
    // produce a dangling reference (the `Self: 'a` bounds enforce this).
    fn contract_lifetime(self) -> Self::Aged where Self: 'a + Sized {
        unsafe { self.change_lifetime() }
    }
    fn contract_lifetime_ref(&'a self) -> &'a Self::Aged where Self: 'a {
        unsafe { self.change_lifetime_ref() }
    }
    fn contract_lifetime_mut(&'a mut self) -> &'a mut Self::Aged where Self: 'a {
        unsafe { self.change_lifetime_mut() }
    }
}
// Leaf impls: `String` and `usize` contain no `JSManaged` pointers, so
// re-branding the lifetime is the identity.
unsafe impl<'a, C> JSManageable<'a, C> for String {
    type Aged = String;
    unsafe fn change_lifetime(self) -> Self::Aged where Self: Sized {
        self
    }
}
unsafe impl<'a, C> JSManageable<'a, C> for usize {
    type Aged = usize;
    unsafe fn change_lifetime(self) -> Self::Aged where Self: Sized {
        self
    }
}
unsafe impl<'a, C, T> JSManageable<'a, C> for Vec<T> where T: JSManageable<'a, C> {
    type Aged = Vec<T::Aged>;
    unsafe fn change_lifetime(self) -> Self::Aged where Self: Sized {
        // `T` and `T::Aged` share a representation, hence so do the vectors.
        mem::transmute(self)
    }
}
// etc.
/// A user of a JS runtime implements `JSRunnable`.
pub trait JSRunnable: Sized {
    /// This callback is called with a fresh JS compartment type `C`.
    fn run<C, S>(self, cx: JSContext<S>) where S: CanInitialize<C>;
    /// To trigger the callback, call `rt.start()`.
    fn start(self) {
        // The function-local struct is the fresh compartment brand: it cannot
        // be named outside this function, so compartments from different
        // `start` calls can never be confused with one another.
        struct JSCompartmentImpl;
        let cx = JSContext {
            state: Uninitialized(PhantomData),
        };
        self.run::<JSCompartmentImpl, Uninitialized<JSCompartmentImpl>>(cx);
    }
}
/// The type of JS-managed data in a JS compartment `C`, with lifetime `'a`.
///
/// If the user has access to a `JSManaged`, then the JS-managed
/// data is live for the given lifetime.
pub struct JSManaged<'a, C, T: ?Sized> {
    // JS reflector goes here
    // This raw pointer should really be of type `*mut T`, but that is invariant in T.
    // To make the type variant in T, we use a `*mut ()` instead.
    raw: *mut (),
    // Carries the lifetime brand `'a`, the compartment brand `C`, and the
    // logical payload type `T` without storing any of them.
    marker: PhantomData<(&'a(), C, T)>,
}
impl<'a, C, T: ?Sized> Clone for JSManaged<'a, C, T> {
fn clone(&self) -> Self {
JSManaged {
raw: self.raw,
marker: self.marker,
}
}
}
// `JSManaged` is plain-old-data (raw pointer + marker), so it can be `Copy`.
impl<'a, C, T: ?Sized> Copy for JSManaged<'a, C, T> {
}
// Tracing a managed pointer requires its contents to be traceable.
unsafe impl<'a, C, T: ?Sized> JSTraceable for JSManaged<'a, C, T> where
    T: JSTraceable
{}
// Re-branding a managed pointer re-brands its contents; the trait's default
// `change_lifetime` (a transmute-copy) is valid because `JSManaged` has the
// same representation regardless of lifetime parameters.
unsafe impl<'a, 'b, C: 'b, T: ?Sized> JSManageable<'b, C> for JSManaged<'a, C, T> where
    T: JSManageable<'b, C>,
{
    type Aged = JSManaged<'b, C, T::Aged>;
}
impl<'a, C, T: ?Sized> JSManaged<'a, C, T> {
    /// Read-only access to JS-managed data.
    ///
    /// Requires a context with access to compartment `C`; the returned borrow
    /// is tied to the context borrow, so the context cannot allocate (and
    /// hence GC) while the borrow is live.
    pub fn get<'b, S>(self, _: &'b JSContext<S>) -> &'b T::Aged where
        S: CanAccess<C>,
        T: JSManageable<'b, C>,
        'a: 'b,
    {
        unsafe { &*(self.raw as *mut T::Aged) }
    }
    /// Read-write access to JS-managed data.
    pub fn get_mut<'b, S>(self, _: &'b mut JSContext<S>) -> &'b mut T::Aged where
        S: CanAccess<C>,
        T: JSManageable<'b, C>,
        'a: 'b,
    {
        unsafe { &mut *(self.raw as *mut T::Aged) }
    }
    /// Change the lifetime of JS-managed data.
    ///
    /// Unsafe: the caller must guarantee the data is live for all of `'b`.
    pub unsafe fn change_lifetime<'b>(self) -> JSManaged<'b, C, T::Aged> where
        T: JSManageable<'b, C>,
    {
        JSManaged {
            raw: self.raw,
            marker: PhantomData,
        }
    }
    /// It's safe to contract the lifetime of JS-managed data.
    pub fn contract_lifetime<'b>(self) -> JSManaged<'b, C, T::Aged> where
        T: JSManageable<'b, C>,
        'a: 'b,
    {
        unsafe { self.change_lifetime() }
    }
    /// It's safe to extend the lifetime of JS-managed data if it has been snapshotted.
    ///
    /// The `CanExtend<'b, C>` snapshot witness proves no GC can run during
    /// `'b`, so the data cannot be collected out from under the new handle.
    pub fn extend_lifetime<'b, 'c, S>(self, _: &'c JSContext<S>) -> JSManaged<'b, C, T::Aged> where
        C: 'b,
        S: CanExtend<'b, C>,
        T: JSManageable<'b, C>,
        'b: 'a,
    {
        unsafe { self.change_lifetime() }
    }
}
/// A stack allocated root
pub struct JSRoot<T> {
    // The rooted value, `Some` only while the root is pinned.
    value: Option<T>,
    // Intrusive doubly-linked-list node registered with the context.
    pin: JSUntypedPinnedRoot,
    // The context's list head this root links itself into when pinned.
    roots: *mut JSPinnedRoots,
}
/// A stack allocated root that has been pinned, so the backing store can't move.
pub struct JSPinnedRoot<'a, T:'a> (&'a mut JSRoot<T>);
/// A doubly linked list with all the pinned roots.
#[derive(Eq, PartialEq)]
pub struct JSPinnedRoots(*mut JSUntypedPinnedRoot);
/// A stack allocated root that has been pinned, but we don't have a type for the contents
struct JSUntypedPinnedRoot {
    // Type-erased trait-object pointer to the rooted value.
    value: *mut JSTraceable,
    next: *mut JSUntypedPinnedRoot,
    prev: *mut JSUntypedPinnedRoot,
}
impl<T> JSRoot<T> {
    // Very annoyingly, this function has to be marked as unsafe,
    // because we can't rely on the destructor for the pinned root running.
    // See the discussion about `mem::forget` being safe at
    // https://github.com/rust-lang/rfcs/pull/1066.
    // This is safe as long as it is unpinned before the memory
    // is reclaimed, but Rust does not enforce that.
    pub unsafe fn pin<'a, C, U>(&'a mut self, value: U) -> JSPinnedRoot<'a, T> where
        T: JSManageable<'a, C, Aged=T>,
        U: JSManageable<'a, C, Aged=T>,
    {
        // Store the value, then push this root onto the front of the
        // context's intrusive doubly-linked list of pinned roots.
        self.value = Some(value.change_lifetime());
        // `as_mut_ptr` here is `JSTraceable::as_mut_ptr` on `Option<T>`,
        // yielding a type-erased trait-object pointer to the stored value.
        self.pin.value = self.value.as_mut_ptr();
        self.pin.next = (*self.roots).0;
        self.pin.prev = ptr::null_mut();
        if let Some(next) = self.pin.next.as_mut() {
            next.prev = &mut self.pin;
        }
        *self.roots = JSPinnedRoots(&mut self.pin);
        JSPinnedRoot(self)
    }
    pub unsafe fn unpin(&mut self) {
        // Standard doubly-linked-list unlink: fix up neighbours, then the
        // list head if we were the first node, then reset this node.
        if let Some(next) = self.pin.next.as_mut() {
            next.prev = self.pin.prev;
        }
        if let Some(prev) = self.pin.prev.as_mut() {
            prev.next = self.pin.next;
        }
        if *self.roots == JSPinnedRoots(&mut self.pin) {
            *self.roots = JSPinnedRoots(self.pin.next);
        }
        self.value = None;
        self.pin.value = mem::zeroed();
        self.pin.next = ptr::null_mut();
        self.pin.prev = ptr::null_mut();
    }
}
impl<'a, T> JSPinnedRoot<'a, T> {
    /// Copy the rooted value out (for `Copy`-able contents).
    pub fn get<'b, C>(&'b self) -> T::Aged where
        T: JSManageable<'b, C>,
        T::Aged: Copy,
    {
        *self.get_ref()
    }
    /// Borrow the rooted value.
    ///
    /// The `unwrap` cannot fail: a `JSPinnedRoot` only exists while the root
    /// is pinned, and `JSRoot::pin` always stores `Some`.
    pub fn get_ref<'b, C>(&'b self) -> &'b T::Aged where
        T: JSManageable<'b, C>,
    {
        self.0.value.as_ref().unwrap().contract_lifetime_ref()
    }
    /// Mutably borrow the rooted value.
    pub fn get_mut<'b, C>(&'b mut self) -> &'b mut T::Aged where
        T: JSManageable<'b, C>,
    {
        self.0.value.as_mut().unwrap().contract_lifetime_mut()
    }
}
impl<'a, T> Drop for JSPinnedRoot<'a, T> {
    fn drop(&mut self) {
        // Unlink the root from the context's pinned-root list; sound here
        // because the backing `JSRoot` is still alive (we hold a borrow).
        unsafe { self.0.unpin() }
    }
}
/// Declare a stack-pinned root in the current scope.
///
/// `rooted!(in(cx) let x = v)` creates a hidden `JSRoot`, pins `v` into it,
/// and binds `x` to the rooted value. The `let` / `let mut` / `let ref` /
/// `let ref mut` forms mirror ordinary binding modes (via `get`, `get_ref`,
/// `get_mut`). The hidden pin is dropped — and the root unpinned — at end of
/// scope, which is what makes the `unsafe` call to `pin` sound.
#[macro_export]
macro_rules! rooted {
    (in($cx:expr) let $name:ident = $init:expr) => (
        let mut __root = $cx.new_root();
        #[allow(unsafe_code)]
        let ref __pinned = unsafe { __root.pin($init) };
        let $name = __pinned.get();
    );
    (in($cx:expr) let mut $name:ident = $init:expr) => (
        let mut __root = $cx.new_root();
        #[allow(unsafe_code)]
        let ref __pinned = unsafe { __root.pin($init) };
        let mut $name = __pinned.get();
    );
    (in($cx:expr) let ref $name:ident = $init:expr) => (
        let mut __root = $cx.new_root();
        #[allow(unsafe_code)]
        let ref __pinned = unsafe { __root.pin($init) };
        let $name = __pinned.get_ref();
    );
    (in($cx:expr) let ref mut $name:ident = $init:expr) => (
        let mut __root = $cx.new_root();
        #[allow(unsafe_code)]
        let ref __pinned = unsafe { __root.pin($init) };
        let mut $name = __pinned.get_mut();
    )
}
|
use std::time::Duration;
use plist::Plist;
use std::collections::BTreeMap;
use Stream;
use Result;
use Error;
use message_type;
/// A Client for usbmuxd.
pub struct Client {
    // Connection to the usbmuxd socket; all requests go through it.
    stream: Stream,
}
impl Client {
    /// Tries to create a new instance of the `Client`.
    ///
    /// Connects to usbmuxd and applies a one-second send/receive timeout.
    pub fn new() -> Result<Self> {
        let mut stream = try!(Stream::connect());
        // NOTE(review): `set_send_tymeout` looks like a typo for "timeout",
        // but it matches the `Stream` API as written — confirm before renaming.
        try!(stream.set_send_tymeout(Some(Duration::new(1, 0))));
        try!(stream.set_receive_timeout(Some(Duration::new(1, 0))));
        Ok(Client {
            stream: stream,
        })
    }
    /// Returns a list of connected devices.
    ///
    /// Sends a `ListDevices` request and parses each entry's `Properties`
    /// dictionary; malformed entries are silently skipped, but a response
    /// without a `DeviceList` array is an `UnexpectedFormat` error.
    pub fn devices(&mut self) -> Result<Vec<Device>> {
        let mut plist = try!(self.request(Plist::Dictionary(message_type("ListDevices"))));
        let mut dict = try!(plist.as_dictionary_mut().ok_or(Error::UnexpectedFormat));
        match dict.remove("DeviceList") {
            Some(Plist::Array(array)) => {
                let results = array.into_iter().filter_map(|item| {
                    match item {
                        Plist::Dictionary(mut dict) => {
                            match dict.remove("Properties") {
                                Some(plist) => Device::from_plist(plist),
                                _ => None,
                            }
                        },
                        _ => None,
                    }
                }).collect();
                Ok(results)
            },
            _ => Err(Error::UnexpectedFormat),
        }
    }
    /// Sends a request and receives a response.
    pub fn request(&mut self, message: Plist) -> Result<Plist> {
        try!(self.stream.send(message));
        Ok(try!(self.stream.receive()))
    }
}
/// Represents a device.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Device {
    pub device_id: u32,
    pub product_id: u32,
    pub location_id: u32,
    pub serial_number: String,
}
impl Device {
    /// Creates an instance of `Device` from plist.
    ///
    /// Returns `None` when the plist is not a dictionary or any expected key
    /// is missing or has the wrong type.
    pub fn from_plist(mut plist: Plist) -> Option<Device> {
        let mut dict = try_opt!(plist.as_dictionary_mut());
        Some(Device {
            device_id: try_opt!(dict.get("DeviceID").and_then(Plist::as_integer).map(|x| x as u32)),
            product_id: try_opt!(dict.get("ProductID").and_then(Plist::as_integer).map(|x| x as u32)),
            location_id: try_opt!(dict.get("LocationID").and_then(Plist::as_integer).map(|x| x as u32)),
            // Move the string out of the dictionary instead of borrowing and
            // cloning it (`as_string().to_owned()` allocated a needless copy).
            serial_number: try_opt!(dict.remove("SerialNumber").and_then(Plist::into_string)),
        })
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use expectest::prelude::*;
    use plist::Plist;
    use std::collections::BTreeMap;
    // Builds a realistic `Properties` dictionary and checks that the four
    // fields we care about are extracted; extra keys (ConnectionSpeed,
    // ConnectionType) must be ignored.
    #[test]
    fn test_device_from_plist() {
        let mut map = BTreeMap::new();
        map.insert("ConnectionSpeed".to_owned(), Plist::Integer(480000000));
        map.insert("ConnectionType".to_owned(), Plist::String("USB".to_owned()));
        map.insert("DeviceID".to_owned(), Plist::Integer(3));
        map.insert("LocationID".to_owned(), Plist::Integer(336592896));
        map.insert("ProductID".to_owned(), Plist::Integer(4778));
        map.insert("SerialNumber".to_owned(),Plist::String("fffffffff".to_owned()));
        let device = Device {
            device_id: 3,
            product_id: 4778,
            location_id: 336592896,
            serial_number: "fffffffff".to_owned(),
        };
        expect!(Device::from_plist(Plist::Dictionary(map))).to(be_some().value(device));
    }
}
Update with plist crate.
use std::time::Duration;
use plist::Plist;
use Stream;
use Result;
use Error;
use message_type;
/// A Client for usbmuxd.
pub struct Client {
    // Connection to the usbmuxd socket; all requests go through it.
    stream: Stream,
}
impl Client {
/// Tries to create a new instance of the `Client`.
pub fn new() -> Result<Self> {
let mut stream = try!(Stream::connect());
try!(stream.set_send_tymeout(Some(Duration::new(1, 0))));
try!(stream.set_receive_timeout(Some(Duration::new(1, 0))));
Ok(Client {
stream: stream,
})
}
/// Returns a list of connected devices.
pub fn devices(&mut self) -> Result<Vec<Device>> {
let mut plist = try!(self.request(Plist::Dictionary(message_type("ListDevices"))));
let mut dict = try!(plist.as_dictionary_mut().ok_or(Error::UnexpectedFormat));
match dict.remove("DeviceList") {
Some(Plist::Array(array)) => {
let results = array.into_iter().filter_map(|item| {
match item {
Plist::Dictionary(mut dict) => {
match dict.remove("Properties") {
Some(plist) => Device::from_plist(plist),
_ => None,
}
},
_ => None,
}
}).collect();
Ok(results)
},
_ => Err(Error::UnexpectedFormat),
}
}
/// Sends a request and receives a response.
pub fn request(&mut self, message: Plist) -> Result<Plist> {
try!(self.stream.send(message));
Ok(try!(self.stream.receive()))
}
}
/// Represents a device.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Device {
    pub device_id: u32,
    pub product_id: u32,
    pub location_id: u32,
    pub serial_number: String,
}
impl Device {
    /// Creates an instance of `Device` from plist.
    ///
    /// Returns `None` when the plist is not a dictionary or any expected key
    /// is missing or has the wrong type. `SerialNumber` is moved out of the
    /// dictionary (`remove` + `into_string`) to avoid cloning the string.
    pub fn from_plist(mut plist: Plist) -> Option<Device> {
        let mut dict = try_opt!(plist.as_dictionary_mut());
        Some(Device {
            device_id: try_opt!(dict.get("DeviceID").and_then(Plist::as_integer).map(|x| x as u32)),
            product_id: try_opt!(dict.get("ProductID").and_then(Plist::as_integer).map(|x| x as u32)),
            location_id: try_opt!(dict.get("LocationID").and_then(Plist::as_integer).map(|x| x as u32)),
            serial_number: try_opt!(dict.remove("SerialNumber").and_then(Plist::into_string)),
        })
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use expectest::prelude::*;
    use plist::Plist;
    use std::collections::BTreeMap;
    // Builds a realistic `Properties` dictionary and checks that the four
    // fields we care about are extracted; extra keys must be ignored.
    #[test]
    fn test_device_from_plist() {
        let mut map = BTreeMap::new();
        map.insert("ConnectionSpeed".to_owned(), Plist::Integer(480000000));
        map.insert("ConnectionType".to_owned(), Plist::String("USB".to_owned()));
        map.insert("DeviceID".to_owned(), Plist::Integer(3));
        map.insert("LocationID".to_owned(), Plist::Integer(336592896));
        map.insert("ProductID".to_owned(), Plist::Integer(4778));
        map.insert("SerialNumber".to_owned(),Plist::String("fffffffff".to_owned()));
        let device = Device {
            device_id: 3,
            product_id: 4778,
            location_id: 336592896,
            serial_number: "fffffffff".to_owned(),
        };
        expect!(Device::from_plist(Plist::Dictionary(map))).to(be_some().value(device));
    }
}
Add IrcClient struct
use user::IrcUser;
use std::net::ToSocketAddrs;
/// An IRC client: the server address to connect to plus the user identity
/// to present to it.
#[derive(Debug)]
pub struct IrcClient<A: ToSocketAddrs> {
    pub server: A,
    pub user: IrcUser
}
impl<A: ToSocketAddrs> IrcClient<A> {
    /// Builds an `IrcClient` for the given server address and user identity.
    pub fn new(server: A, user: IrcUser) -> IrcClient<A> {
        IrcClient { server: server, user: user }
    }
}
|
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! libc - Raw FFI bindings to platforms' system libraries
#![crate_name = "libc"]
#![crate_type = "rlib"]
#![cfg_attr(not(feature = "rustc-dep-of-std"), deny(warnings))]
#![allow(bad_style, overflowing_literals, improper_ctypes, unknown_lints)]
// Attributes needed when building as part of the standard library
#![cfg_attr(
feature = "rustc-dep-of-std",
feature(cfg_target_vendor, link_cfg, no_core)
)]
// Enable extra lints:
#![cfg_attr(feature = "extra_traits", deny(missing_debug_implementations))]
#![deny(missing_copy_implementations, safe_packed_borrows)]
#![no_std]
#![cfg_attr(feature = "rustc-dep-of-std", no_core)]
#[macro_use]
mod macros;
// When built as part of std (`rustc-dep-of-std`), libc cannot depend on the
// real `core` crate directly; it goes through the rustc workspace shim.
cfg_if! {
    if #[cfg(feature = "rustc-dep-of-std")] {
        extern crate rustc_std_workspace_core as core;
        #[allow(unused_imports)]
        use core::iter;
        #[allow(unused_imports)]
        use core::option;
    }
}
// Older compilers (detected by the build script, which sets
// `libc_priv_mod_use`) can keep these core re-exports private; otherwise they
// must be `pub use` for the macros to resolve them from downstream crates.
cfg_if! {
    if #[cfg(libc_priv_mod_use)] {
        #[cfg(libc_core_cvoid)]
        #[allow(unused_imports)]
        use core::ffi;
        #[allow(unused_imports)]
        use core::fmt;
        #[allow(unused_imports)]
        use core::hash;
        #[allow(unused_imports)]
        use core::num;
        #[allow(unused_imports)]
        use core::mem;
        #[doc(hidden)]
        #[allow(unused_imports)]
        use core::clone::Clone;
        #[doc(hidden)]
        #[allow(unused_imports)]
        use core::marker::Copy;
        #[doc(hidden)]
        #[allow(unused_imports)]
        use core::option::Option;
    } else {
        #[doc(hidden)]
        #[allow(unused_imports)]
        pub use core::fmt;
        #[doc(hidden)]
        #[allow(unused_imports)]
        pub use core::hash;
        #[doc(hidden)]
        #[allow(unused_imports)]
        pub use core::num;
        #[doc(hidden)]
        #[allow(unused_imports)]
        pub use core::mem;
        #[doc(hidden)]
        #[allow(unused_imports)]
        pub use core::clone::Clone;
        #[doc(hidden)]
        #[allow(unused_imports)]
        pub use core::marker::Copy;
        #[doc(hidden)]
        #[allow(unused_imports)]
        pub use core::option::Option;
    }
}
// Select exactly one platform module and re-export its entire API surface.
cfg_if! {
    if #[cfg(windows)] {
        mod windows;
        pub use windows::*;
    } else if #[cfg(target_os = "redox")] {
        mod redox;
        pub use redox::*;
    } else if #[cfg(target_os = "cloudabi")] {
        mod cloudabi;
        pub use cloudabi::*;
    } else if #[cfg(target_os = "fuchsia")] {
        mod fuchsia;
        pub use fuchsia::*;
    } else if #[cfg(target_os = "switch")] {
        mod switch;
        pub use switch::*;
    } else if #[cfg(unix)] {
        mod unix;
        pub use unix::*;
    } else if #[cfg(target_env = "sgx")] {
        mod sgx;
        pub use sgx::*;
    } else {
        // non-supported targets: empty...
    }
}
Auto merge of #1290 - gnzlbg:doc_link, r=gnzlbg
Link docs for other platforms in docs.rs
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! libc - Raw FFI bindings to platforms' system libraries
//!
//! [Documentation for other platforms][pd].
//!
//! [pd]: https://rust-lang.github.io/libc/#platform-specific-documentation
#![crate_name = "libc"]
#![crate_type = "rlib"]
#![cfg_attr(not(feature = "rustc-dep-of-std"), deny(warnings))]
#![allow(bad_style, overflowing_literals, improper_ctypes, unknown_lints)]
// Attributes needed when building as part of the standard library
#![cfg_attr(
feature = "rustc-dep-of-std",
feature(cfg_target_vendor, link_cfg, no_core)
)]
// Enable extra lints:
#![cfg_attr(feature = "extra_traits", deny(missing_debug_implementations))]
#![deny(missing_copy_implementations, safe_packed_borrows)]
#![no_std]
#![cfg_attr(feature = "rustc-dep-of-std", no_core)]
#[macro_use]
mod macros;
// As a dependency of std, libc must use the rustc workspace shim for `core`.
cfg_if! {
    if #[cfg(feature = "rustc-dep-of-std")] {
        extern crate rustc_std_workspace_core as core;
        #[allow(unused_imports)]
        use core::iter;
        #[allow(unused_imports)]
        use core::option;
    }
}
// `libc_priv_mod_use` is set by the build script on compilers where private
// `use` suffices; otherwise the re-exports must stay `pub` for the macros.
cfg_if! {
    if #[cfg(libc_priv_mod_use)] {
        #[cfg(libc_core_cvoid)]
        #[allow(unused_imports)]
        use core::ffi;
        #[allow(unused_imports)]
        use core::fmt;
        #[allow(unused_imports)]
        use core::hash;
        #[allow(unused_imports)]
        use core::num;
        #[allow(unused_imports)]
        use core::mem;
        #[doc(hidden)]
        #[allow(unused_imports)]
        use core::clone::Clone;
        #[doc(hidden)]
        #[allow(unused_imports)]
        use core::marker::Copy;
        #[doc(hidden)]
        #[allow(unused_imports)]
        use core::option::Option;
    } else {
        #[doc(hidden)]
        #[allow(unused_imports)]
        pub use core::fmt;
        #[doc(hidden)]
        #[allow(unused_imports)]
        pub use core::hash;
        #[doc(hidden)]
        #[allow(unused_imports)]
        pub use core::num;
        #[doc(hidden)]
        #[allow(unused_imports)]
        pub use core::mem;
        #[doc(hidden)]
        #[allow(unused_imports)]
        pub use core::clone::Clone;
        #[doc(hidden)]
        #[allow(unused_imports)]
        pub use core::marker::Copy;
        #[doc(hidden)]
        #[allow(unused_imports)]
        pub use core::option::Option;
    }
}
// Exactly one platform module is compiled in and its API re-exported.
cfg_if! {
    if #[cfg(windows)] {
        mod windows;
        pub use windows::*;
    } else if #[cfg(target_os = "redox")] {
        mod redox;
        pub use redox::*;
    } else if #[cfg(target_os = "cloudabi")] {
        mod cloudabi;
        pub use cloudabi::*;
    } else if #[cfg(target_os = "fuchsia")] {
        mod fuchsia;
        pub use fuchsia::*;
    } else if #[cfg(target_os = "switch")] {
        mod switch;
        pub use switch::*;
    } else if #[cfg(unix)] {
        mod unix;
        pub use unix::*;
    } else if #[cfg(target_env = "sgx")] {
        mod sgx;
        pub use sgx::*;
    } else {
        // non-supported targets: empty...
    }
}
|
use std::io::BufRead;
/// Reads one line from standard input and returns it without the trailing
/// newline.
///
/// Panics on EOF or if the input is not valid UTF-8.
pub fn read_line() -> String {
    let stdin = std::io::stdin();
    let locked = stdin.lock();
    locked.lines().next().unwrap().unwrap()
}
a generic read function for all FromStr types
use std::io::{BufRead, Read};
use std::str::{FromStr, from_utf8};
use std::fmt::Debug;
/// Reads one line from standard input and returns it without the trailing
/// newline.
///
/// Panics on EOF or if the input is not valid UTF-8.
pub fn read_line() -> String {
    let stdin = std::io::stdin();
    let stdin = stdin.lock();
    let mut lines = stdin.lines();
    lines.next().unwrap().unwrap()
}
/// Reads one whitespace-delimited token from standard input and parses it
/// into any `FromStr` type.
///
/// Leading whitespace is skipped; reading stops at the first whitespace byte
/// after the token. Panics if the token is not valid UTF-8 or fails to parse.
pub fn read<T: FromStr>() -> T
    where <T as FromStr>::Err: Debug
{
    let stdin = std::io::stdin();
    let stdin = stdin.lock();
    // BUG FIX: the predicate was inverted — `take_while` previously collected
    // *whitespace* bytes and stopped at the first token byte, so the parse
    // always saw an empty (or whitespace) string and failed. We now skip
    // leading whitespace, then take bytes until the next whitespace.
    let v: Vec<u8> = stdin.bytes()
        .map(|c| c.unwrap())
        .skip_while(|c| b" \n\r\t".contains(c))
        .take_while(|c| !b" \n\r\t".contains(c))
        .collect();
    FromStr::from_str(from_utf8(&v).unwrap()).unwrap()
}
|
extern crate byteorder;
use std::{fmt, error, str};
use byteorder::{BigEndian, ByteOrder};
mod tables;
/// Available encoding character sets
#[derive(Clone, Copy, Debug)]
pub enum CharacterSet {
    /// The standard character set (uses `+` and `/`)
    Standard,
    /// The URL safe character set (uses `-` and `_`)
    UrlSafe
}
/// Line-ending style used when output wrapping is enabled
#[derive(Clone, Copy, Debug)]
pub enum LineEnding {
    /// Bare `\n`
    LF,
    /// `\r\n` (required by e.g. MIME)
    CRLF,
}
/// Whether, and how, to wrap encoded output into fixed-width lines
#[derive(Clone, Copy, Debug)]
pub enum LineWrap {
    NoWrap,
    /// Wrap every `usize` characters, terminated by the given ending
    Wrap(usize, LineEnding)
}
/// Contains configuration parameters for base64 encoding
#[derive(Clone, Copy, Debug)]
pub struct Config {
    /// Character set to use
    char_set: CharacterSet,
    /// True to pad output with `=` characters
    pad: bool,
    /// Remove whitespace before decoding, at the cost of an allocation
    strip_whitespace: bool,
    /// ADT signifying whether to linewrap output, and if so by how many characters and with what ending
    line_wrap: LineWrap,
}
impl Config {
    /// Builds a `Config` from its four parameters.
    ///
    /// A requested wrap width of zero is meaningless, so it is normalized to
    /// `LineWrap::NoWrap` (this also keeps later `/ width` math safe).
    pub fn new(char_set: CharacterSet,
               pad: bool,
               strip_whitespace: bool,
               input_line_wrap: LineWrap) -> Config {
        let line_wrap = if let LineWrap::Wrap(0, _) = input_line_wrap {
            LineWrap::NoWrap
        } else {
            input_line_wrap
        };
        Config {
            char_set: char_set,
            pad: pad,
            strip_whitespace: strip_whitespace,
            line_wrap: line_wrap,
        }
    }
}
/// RFC 4648 standard alphabet, padded, no wrapping.
pub static STANDARD: Config = Config {
    char_set: CharacterSet::Standard,
    pad: true,
    strip_whitespace: false,
    line_wrap: LineWrap::NoWrap,
};
/// MIME-style: standard alphabet, padded, whitespace stripped on decode,
/// output wrapped at 76 characters with CRLF endings.
pub static MIME: Config = Config {
    char_set: CharacterSet::Standard,
    pad: true,
    strip_whitespace: true,
    line_wrap: LineWrap::Wrap(76, LineEnding::CRLF),
};
/// URL-safe alphabet (`-`/`_`), padded, no wrapping.
pub static URL_SAFE: Config = Config {
    char_set: CharacterSet::UrlSafe,
    pad: true,
    strip_whitespace: false,
    line_wrap: LineWrap::NoWrap,
};
/// URL-safe alphabet without padding characters.
pub static URL_SAFE_NO_PAD: Config = Config {
    char_set: CharacterSet::UrlSafe,
    pad: false,
    strip_whitespace: false,
    line_wrap: LineWrap::NoWrap,
};
/// Errors that can occur while decoding.
#[derive(Debug, PartialEq, Eq)]
pub enum DecodeError {
    /// An invalid byte was found at the given offset in the input.
    InvalidByte(usize, u8),
    /// The input length is impossible for valid base64 (a lone trailing char).
    InvalidLength,
}
impl fmt::Display for DecodeError {
    // User-facing message; note the tuple is (offset, byte) but the message
    // prints the byte first.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            DecodeError::InvalidByte(index, byte) =>
                write!(f, "Invalid byte {}, offset {}.", byte, index),
            DecodeError::InvalidLength =>
                write!(f, "Encoded text cannot have a 6-bit remainder.")
        }
    }
}
impl error::Error for DecodeError {
    fn description(&self) -> &str {
        match *self {
            DecodeError::InvalidByte(_, _) => "invalid byte",
            DecodeError::InvalidLength => "invalid length"
        }
    }
    // Decode errors are root causes; there is no underlying error to expose.
    fn cause(&self) -> Option<&error::Error> {
        None
    }
}
///Encode arbitrary octets as base64.
///Returns a String.
///Convenience for `encode_config(input, base64::STANDARD);`.
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
///    let b64 = base64::encode(b"hello world");
///    println!("{}", b64);
///}
///```
pub fn encode<T: ?Sized + AsRef<[u8]>>(input: &T) -> String {
    encode_config(input, STANDARD)
}
///Decode from string reference as octets.
///Returns a Result containing a Vec<u8>.
///Convenience `decode_config(input, base64::STANDARD);`.
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
///    let bytes = base64::decode("aGVsbG8gd29ybGQ=").unwrap();
///    println!("{:?}", bytes);
///}
///```
pub fn decode<T: ?Sized + AsRef<[u8]>>(input: &T) -> Result<Vec<u8>, DecodeError> {
    decode_config(input, STANDARD)
}
///Encode arbitrary octets as base64.
///Returns a String.
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
///    let b64 = base64::encode_config(b"hello world~", base64::STANDARD);
///    println!("{}", b64);
///
///    let b64_url = base64::encode_config(b"hello internet~", base64::URL_SAFE);
///    println!("{}", b64_url);
///}
///```
pub fn encode_config<T: ?Sized + AsRef<[u8]>>(input: &T, config: Config) -> String {
    // Pre-size the output exactly (padding and line endings included) so the
    // buffer-based encoder never reallocates.
    let mut buf = String::with_capacity(encoded_size(input.as_ref().len(), config));
    encode_config_buf(input, config, &mut buf);
    buf
}
/// calculate the base64 encoded string size, including padding
///
/// Every complete 3-byte input chunk becomes 4 output chars, plus one padded
/// 4-char group when a partial chunk remains; line endings are added on top
/// when wrapping is configured (this may slightly over-count the final line's
/// ending, which is harmless for capacity reservation).
fn encoded_size(bytes_len: usize, config: Config) -> usize {
    let complete_chunks = bytes_len / 3;
    let mut printing_chars = complete_chunks * 4;
    if bytes_len % 3 != 0 {
        printing_chars += 4;
    }
    let ending_chars = match config.line_wrap {
        LineWrap::NoWrap => 0,
        LineWrap::Wrap(n, LineEnding::CRLF) => printing_chars / n * 2,
        LineWrap::Wrap(n, LineEnding::LF) => printing_chars / n,
    };
    printing_chars + ending_chars
}
///Encode arbitrary octets as base64.
///Writes into the supplied buffer to avoid allocations.
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
///    let mut buf = String::new();
///    base64::encode_config_buf(b"hello world~", base64::STANDARD, &mut buf);
///    println!("{}", buf);
///
///    buf.clear();
///    base64::encode_config_buf(b"hello internet~", base64::URL_SAFE, &mut buf);
///    println!("{}", buf);
///}
///```
pub fn encode_config_buf<T: ?Sized + AsRef<[u8]>>(input: &T, config: Config, buf: &mut String) {
    let input_bytes = input.as_ref();
    let ref charset = match config.char_set {
        CharacterSet::Standard => tables::STANDARD_ENCODE,
        CharacterSet::UrlSafe => tables::URL_SAFE_ENCODE,
    };
    // reserve to make sure the memory we'll be writing to with unsafe is allocated
    buf.reserve(encoded_size(input_bytes.len(), config));
    let orig_buf_len = buf.len();
    let mut fast_loop_output_buf_len = orig_buf_len;
    // The fast loop reads 8 input bytes at a time but only *consumes* 6 of
    // them (48 bits -> 8 output chars); the 2-byte overread is why it stops
    // at len - 8.
    let input_chunk_len = 6;
    let last_fast_index = input_bytes.len().saturating_sub(8);
    // we're only going to insert valid utf8
    let mut raw = unsafe { buf.as_mut_vec() };
    // start at the first free part of the output buf
    let mut output_ptr = unsafe { raw.as_mut_ptr().offset(orig_buf_len as isize) };
    let mut input_index: usize = 0;
    if input_bytes.len() >= 8 {
        while input_index <= last_fast_index {
            let input_chunk = BigEndian::read_u64(&input_bytes[input_index..(input_index + 8)]);
            // strip off 6 bits at a time for the first 6 bytes
            // SAFETY: the `reserve` above guarantees 8 writable bytes here.
            unsafe {
                std::ptr::write(output_ptr, charset[((input_chunk >> 58) & 0x3F) as usize]);
                std::ptr::write(output_ptr.offset(1), charset[((input_chunk >> 52) & 0x3F) as usize]);
                std::ptr::write(output_ptr.offset(2), charset[((input_chunk >> 46) & 0x3F) as usize]);
                std::ptr::write(output_ptr.offset(3), charset[((input_chunk >> 40) & 0x3F) as usize]);
                std::ptr::write(output_ptr.offset(4), charset[((input_chunk >> 34) & 0x3F) as usize]);
                std::ptr::write(output_ptr.offset(5), charset[((input_chunk >> 28) & 0x3F) as usize]);
                std::ptr::write(output_ptr.offset(6), charset[((input_chunk >> 22) & 0x3F) as usize]);
                std::ptr::write(output_ptr.offset(7), charset[((input_chunk >> 16) & 0x3F) as usize]);
                output_ptr = output_ptr.offset(8);
            }
            input_index += input_chunk_len;
            fast_loop_output_buf_len += 8;
        }
    }
    unsafe {
        // expand len to include the bytes we just wrote
        raw.set_len(fast_loop_output_buf_len);
    }
    // encode the 0 to 7 bytes left after the fast loop
    let rem = input_bytes.len() % 3;
    let start_of_rem = input_bytes.len() - rem;
    // start at the first index not handled by fast loop, which may be 0.
    let mut leftover_index = input_index;
    // Complete 3-byte chunks the fast loop didn't reach, one 4-char group each.
    while leftover_index < start_of_rem {
        raw.push(charset[(input_bytes[leftover_index] >> 2) as usize]);
        raw.push(charset[((input_bytes[leftover_index] << 4 | input_bytes[leftover_index + 1] >> 4) & 0x3f) as usize]);
        raw.push(charset[((input_bytes[leftover_index + 1] << 2 | input_bytes[leftover_index + 2] >> 6) & 0x3f) as usize]);
        raw.push(charset[(input_bytes[leftover_index + 2] & 0x3f) as usize]);
        leftover_index += 3;
    }
    // Final partial chunk: 2 bytes -> 3 chars, 1 byte -> 2 chars.
    if rem == 2 {
        raw.push(charset[(input_bytes[start_of_rem] >> 2) as usize]);
        raw.push(charset[((input_bytes[start_of_rem] << 4 | input_bytes[start_of_rem + 1] >> 4) & 0x3f) as usize]);
        raw.push(charset[(input_bytes[start_of_rem + 1] << 2 & 0x3f) as usize]);
    } else if rem == 1 {
        raw.push(charset[(input_bytes[start_of_rem] >> 2) as usize]);
        raw.push(charset[(input_bytes[start_of_rem] << 4 & 0x3f) as usize]);
    }
    if config.pad {
        // 0x3d is '='.
        for _ in 0..((3 - rem) % 3) {
            raw.push(0x3d);
        }
    }
    //TODO FIXME this does the wrong thing for nonempty buffers
    // (wrapping is skipped entirely when appending to a caller's buffer).
    if orig_buf_len == 0 {
        if let LineWrap::Wrap(line_size, line_end) = config.line_wrap {
            // `i` walks the pre-wrap character positions, `j` tracks the
            // corresponding index in the growing (ending-inserted) buffer.
            // NOTE(review): `insert` shifts the tail each time, so this is
            // O(n^2) for large wrapped output.
            let len = raw.len();
            let mut i = 0;
            let mut j = 0;
            while i < len {
                if i > 0 && i % line_size == 0 {
                    match line_end {
                        LineEnding::LF => { raw.insert(j, b'\n'); j += 1; }
                        LineEnding::CRLF => { raw.insert(j, b'\r'); raw.insert(j + 1, b'\n'); j += 2; }
                    }
                }
                i += 1;
                j += 1;
            }
        }
    }
}
///Decode from string reference as octets.
///Returns a Result containing a Vec<u8>.
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
///    let bytes = base64::decode_config("aGVsbG8gd29ybGR+Cg==", base64::STANDARD).unwrap();
///    println!("{:?}", bytes);
///
///    let bytes_url = base64::decode_config("aGVsbG8gaW50ZXJuZXR-Cg==", base64::URL_SAFE).unwrap();
///    println!("{:?}", bytes_url);
///}
///```
pub fn decode_config<T: ?Sized + AsRef<[u8]>>(input: &T, config: Config) -> Result<Vec<u8>, DecodeError> {
    // Decoding shrinks: 4 input chars produce 3 output bytes, so 3/4 of the
    // input length bounds the output. The previous `* 4 / 3` over-allocated
    // by nearly 2x (it used the *encoding* growth factor by mistake).
    let mut buffer = Vec::<u8>::with_capacity(input.as_ref().len() * 3 / 4);
    decode_config_buf(input, config, &mut buffer).map(|_| buffer)
}
///Decode from string reference as octets.
///Writes into the supplied buffer to avoid allocation.
///Returns a Result containing an empty tuple, aka ().
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
/// let mut buffer = Vec::<u8>::new();
/// base64::decode_config_buf("aGVsbG8gd29ybGR+Cg==", base64::STANDARD, &mut buffer).unwrap();
/// println!("{:?}", buffer);
///
/// buffer.clear();
///
/// base64::decode_config_buf("aGVsbG8gaW50ZXJuZXR-Cg==", base64::URL_SAFE, &mut buffer).unwrap();
/// println!("{:?}", buffer);
///}
///```
pub fn decode_config_buf<T: ?Sized + AsRef<[u8]>>(input: &T,
                                                  config: Config,
                                                  buffer: &mut Vec<u8>)
                                                  -> Result<(), DecodeError> {
    // Optionally strip ASCII whitespace first (costs one allocation + copy).
    let mut input_copy;
    let input_bytes = if config.strip_whitespace {
        input_copy = Vec::<u8>::with_capacity(input.as_ref().len());
        input_copy.extend(input.as_ref().iter().filter(|b| !b" \n\t\r\x0b\x0c".contains(b)));
        input_copy.as_ref()
    } else {
        input.as_ref()
    };

    let ref decode_table = match config.char_set {
        CharacterSet::Standard => tables::STANDARD_DECODE,
        CharacterSet::UrlSafe => tables::URL_SAFE_DECODE,
    };

    // Decoded output is at most 3 bytes per 4 input bytes.
    buffer.reserve(input_bytes.len() * 3 / 4);

    // the fast loop only handles complete chunks of 8 input bytes without padding
    let chunk_len = 8;
    let decoded_chunk_len = 6;
    let remainder_len = input_bytes.len() % chunk_len;
    let trailing_bytes_to_skip = if remainder_len == 0 {
        // if input is a multiple of the chunk size, ignore the last chunk as it may have padding
        chunk_len
    } else {
        remainder_len
    };

    let length_of_full_chunks = input_bytes.len().saturating_sub(trailing_bytes_to_skip);

    let starting_output_index = buffer.len();
    // Resize to hold decoded output from fast loop. Need the extra two bytes because
    // we write a full 8 bytes for the last 6-byte decoded chunk and then truncate off two
    let new_size = starting_output_index
        + length_of_full_chunks / chunk_len * decoded_chunk_len
        + (chunk_len - decoded_chunk_len);
    buffer.resize(new_size, 0);

    let mut output_index = starting_output_index;

    {
        let buffer_slice = buffer.as_mut_slice();

        let mut input_index = 0;
        // initial value is never used; always set if fast loop breaks
        let mut bad_byte_index: usize = 0;
        // a non-invalid value means it's not an error if fast loop never runs
        let mut morsel: u8 = 0;

        // fast loop of 8 bytes at a time.
        // Each input byte is mapped through the decode table to a 6-bit "morsel";
        // the 8 morsels are packed into bits 58..16 of `accum` and written out
        // big-endian, of which only the first 6 bytes are decoded output.
        while input_index < length_of_full_chunks {
            let mut accum: u64;

            let input_chunk = BigEndian::read_u64(&input_bytes[input_index..(input_index + 8)]);
            morsel = decode_table[(input_chunk >> 56) as usize];
            if morsel == tables::INVALID_VALUE {
                bad_byte_index = input_index;
                break;
            };
            accum = (morsel as u64) << 58;

            morsel = decode_table[(input_chunk >> 48 & 0xFF) as usize];
            if morsel == tables::INVALID_VALUE {
                bad_byte_index = input_index + 1;
                break;
            };
            accum |= (morsel as u64) << 52;

            morsel = decode_table[(input_chunk >> 40 & 0xFF) as usize];
            if morsel == tables::INVALID_VALUE {
                bad_byte_index = input_index + 2;
                break;
            };
            accum |= (morsel as u64) << 46;

            morsel = decode_table[(input_chunk >> 32 & 0xFF) as usize];
            if morsel == tables::INVALID_VALUE {
                bad_byte_index = input_index + 3;
                break;
            };
            accum |= (morsel as u64) << 40;

            morsel = decode_table[(input_chunk >> 24 & 0xFF) as usize];
            if morsel == tables::INVALID_VALUE {
                bad_byte_index = input_index + 4;
                break;
            };
            accum |= (morsel as u64) << 34;

            morsel = decode_table[(input_chunk >> 16 & 0xFF) as usize];
            if morsel == tables::INVALID_VALUE {
                bad_byte_index = input_index + 5;
                break;
            };
            accum |= (morsel as u64) << 28;

            morsel = decode_table[(input_chunk >> 8 & 0xFF) as usize];
            if morsel == tables::INVALID_VALUE {
                bad_byte_index = input_index + 6;
                break;
            };
            accum |= (morsel as u64) << 22;

            morsel = decode_table[(input_chunk & 0xFF) as usize];
            if morsel == tables::INVALID_VALUE {
                bad_byte_index = input_index + 7;
                break;
            };
            accum |= (morsel as u64) << 16;

            BigEndian::write_u64(&mut buffer_slice[(output_index)..(output_index + 8)],
                                 accum);

            output_index += 6;
            input_index += chunk_len;
        };

        if morsel == tables::INVALID_VALUE {
            // we got here from a break
            return Err(DecodeError::InvalidByte(bad_byte_index, input_bytes[bad_byte_index]));
        }
    }

    // Truncate off the last two bytes from writing the last u64.
    // Unconditional because we added on the extra 2 bytes in the resize before the loop,
    // so it will never underflow.
    let new_len = buffer.len() - (chunk_len - decoded_chunk_len);
    buffer.truncate(new_len);

    // handle leftovers (at most 8 bytes, decoded to 6).
    // Use a u64 as a stack-resident 8 bytes buffer.
    let mut leftover_bits: u64 = 0;
    let mut morsels_in_leftover = 0;
    let mut padding_bytes = 0;
    let mut first_padding_index: usize = 0;
    for (i, b) in input_bytes[length_of_full_chunks..].iter().enumerate() {
        // '=' padding
        if *b == 0x3D {
            // There can be bad padding in a few ways:
            // 1 - Padding with non-padding characters after it
            // 2 - Padding after zero or one non-padding characters before it
            //     in the current quad.
            // 3 - More than two characters of padding. If 3 or 4 padding chars
            //     are in the same quad, that implies it will be caught by #2.
            //     If it spreads from one quad to another, it will be caught by
            //     #2 in the second quad.

            if i % 4 < 2 {
                // Check for case #2.
                // TODO InvalidPadding error
                return Err(DecodeError::InvalidByte(length_of_full_chunks + i, *b));
            };

            if padding_bytes == 0 {
                first_padding_index = i;
            };

            padding_bytes += 1;
            continue;
        };

        // Check for case #1.
        // To make '=' handling consistent with the main loop, don't allow
        // non-suffix '=' in trailing chunk either. Report error as first
        // erroneous padding.
        if padding_bytes > 0 {
            return Err(DecodeError::InvalidByte(
                length_of_full_chunks + first_padding_index, 0x3D));
        };

        // can use up to 8 * 6 = 48 bits of the u64, if last chunk has no padding.
        // To minimize shifts, pack the leftovers from left to right.
        let shift = 64 - (morsels_in_leftover + 1) * 6;
        // tables are all 256 elements, cannot overflow from a u8 index
        let morsel = decode_table[*b as usize];
        if morsel == tables::INVALID_VALUE {
            return Err(DecodeError::InvalidByte(length_of_full_chunks + i, *b));
        };

        leftover_bits |= (morsel as u64) << shift;
        morsels_in_leftover += 1;
    };

    // Map morsel count to the number of output *bits* that are complete bytes.
    // Counts of 1 and 5 (i.e. a lone trailing 6-bit morsel) can never occur in
    // valid base64 and are rejected.
    let leftover_bits_ready_to_append = match morsels_in_leftover {
        0 => 0,
        1 => return Err(DecodeError::InvalidLength),
        2 => 8,
        3 => 16,
        4 => 24,
        5 => return Err(DecodeError::InvalidLength),
        6 => 32,
        7 => 40,
        8 => 48,
        _ => panic!("Impossible: must only have 0 to 4 input bytes in last quad")
    };

    let mut leftover_bits_appended_to_buf = 0;
    while leftover_bits_appended_to_buf < leftover_bits_ready_to_append {
        // `as` simply truncates the higher bits, which is what we want here
        let selected_bits = (leftover_bits >> (56 - leftover_bits_appended_to_buf)) as u8;
        buffer.push(selected_bits);

        leftover_bits_appended_to_buf += 8;
    };

    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;

    // Every 3 input bytes produce 4 output chars, rounded up to a full quad
    // when padding is enabled (STANDARD pads).
    #[test]
    fn encoded_size_correct() {
        assert_eq!(0, encoded_size(0, STANDARD));
        assert_eq!(4, encoded_size(1, STANDARD));
        assert_eq!(4, encoded_size(2, STANDARD));
        assert_eq!(4, encoded_size(3, STANDARD));
        assert_eq!(8, encoded_size(4, STANDARD));
        assert_eq!(8, encoded_size(5, STANDARD));
        assert_eq!(8, encoded_size(6, STANDARD));
        assert_eq!(12, encoded_size(7, STANDARD));
        assert_eq!(12, encoded_size(8, STANDARD));
        assert_eq!(12, encoded_size(9, STANDARD));
        assert_eq!(72, encoded_size(54, STANDARD));
        assert_eq!(76, encoded_size(55, STANDARD));
        assert_eq!(76, encoded_size(56, STANDARD));
        assert_eq!(76, encoded_size(57, STANDARD));
        assert_eq!(80, encoded_size(58, STANDARD));
    }

    // MIME wraps at 76 chars with CRLF, adding 2 bytes per wrapped line.
    #[test]
    fn encoded_size_correct_mime() {
        assert_eq!(0, encoded_size(0, MIME));
        assert_eq!(4, encoded_size(1, MIME));
        assert_eq!(4, encoded_size(2, MIME));
        assert_eq!(4, encoded_size(3, MIME));
        assert_eq!(8, encoded_size(4, MIME));
        assert_eq!(8, encoded_size(5, MIME));
        assert_eq!(8, encoded_size(6, MIME));
        assert_eq!(12, encoded_size(7, MIME));
        assert_eq!(12, encoded_size(8, MIME));
        assert_eq!(12, encoded_size(9, MIME));
        assert_eq!(72, encoded_size(54, MIME));
        assert_eq!(78, encoded_size(55, MIME));
        assert_eq!(78, encoded_size(56, MIME));
        assert_eq!(78, encoded_size(57, MIME));
        assert_eq!(82, encoded_size(58, MIME));
    }

    // LF wrapping adds only 1 byte per wrapped line.
    #[test]
    fn encoded_size_correct_lf() {
        let config = Config::new(
            CharacterSet::Standard,
            true,
            false,
            LineWrap::Wrap(76, LineEnding::LF)
        );
        assert_eq!(0, encoded_size(0, config));
        assert_eq!(4, encoded_size(1, config));
        assert_eq!(4, encoded_size(2, config));
        assert_eq!(4, encoded_size(3, config));
        assert_eq!(8, encoded_size(4, config));
        assert_eq!(8, encoded_size(5, config));
        assert_eq!(8, encoded_size(6, config));
        assert_eq!(12, encoded_size(7, config));
        assert_eq!(12, encoded_size(8, config));
        assert_eq!(12, encoded_size(9, config));
        assert_eq!(72, encoded_size(54, config));
        assert_eq!(77, encoded_size(55, config));
        assert_eq!(77, encoded_size(56, config));
        assert_eq!(77, encoded_size(57, config));
        assert_eq!(81, encoded_size(58, config));
    }
}
Use checked arithmetic in encoded_size
Previously, encoded_size could silently overflow usize, resulting in a
write past the bounds of the buffer allocated by reserve. This changes
encoded_size to return an Option, with None if overflow occurs.
Presently callers simply panic in this case, but it could conceivably be
rendered as an error in the future.
Credit to Andrew Ayer for reporting this vulnerability.
extern crate byteorder;
use std::{fmt, error, str};
use byteorder::{BigEndian, ByteOrder};
mod tables;
/// Available encoding character sets.
///
/// Both sets share the alphanumeric characters; they differ only in the
/// two symbols used for values 62 and 63.
#[derive(Clone, Copy, Debug)]
pub enum CharacterSet {
    /// The standard character set (uses `+` and `/`)
    Standard,
    /// The URL safe character set (uses `-` and `_`)
    UrlSafe
}
/// Line ending used when line-wrapping encoded output.
#[derive(Clone, Copy, Debug)]
pub enum LineEnding {
    /// Unix-style `\n`.
    LF,
    /// Windows/MIME-style `\r\n`.
    CRLF,
}
/// Whether (and how) to line-wrap encoded output.
#[derive(Clone, Copy, Debug)]
pub enum LineWrap {
    /// Emit output as a single unwrapped line.
    NoWrap,
    /// Wrap output every given number of characters, using the given line ending.
    Wrap(usize, LineEnding)
}
/// Contains configuration parameters for base64 encoding
#[derive(Clone, Copy, Debug)]
pub struct Config {
    /// Character set to use
    char_set: CharacterSet,
    /// True to pad output with `=` characters
    pad: bool,
    /// Remove whitespace before decoding, at the cost of an allocation
    strip_whitespace: bool,
    /// ADT signifying whether to linewrap output, and if so by how many characters and with what ending
    line_wrap: LineWrap,
}
impl Config {
    /// Builds a `Config` from its parts.
    ///
    /// A requested wrap width of zero is meaningless, so it is normalized
    /// to `LineWrap::NoWrap`.
    pub fn new(char_set: CharacterSet,
               pad: bool,
               strip_whitespace: bool,
               input_line_wrap: LineWrap) -> Config {
        // Normalize the degenerate "wrap every 0 chars" request.
        let line_wrap = if let LineWrap::Wrap(0, _) = input_line_wrap {
            LineWrap::NoWrap
        } else {
            input_line_wrap
        };

        Config {
            char_set: char_set,
            pad: pad,
            strip_whitespace: strip_whitespace,
            line_wrap: line_wrap,
        }
    }
}
/// Standard base64: `+`/`/` alphabet, `=` padding, no whitespace stripping,
/// no line wrapping.
pub static STANDARD: Config = Config {
    char_set: CharacterSet::Standard,
    pad: true,
    strip_whitespace: false,
    line_wrap: LineWrap::NoWrap,
};

/// MIME-style base64: standard alphabet with padding, whitespace stripped on
/// decode, and output wrapped at 76 characters with CRLF.
pub static MIME: Config = Config {
    char_set: CharacterSet::Standard,
    pad: true,
    strip_whitespace: true,
    line_wrap: LineWrap::Wrap(76, LineEnding::CRLF),
};

/// URL-safe base64: `-`/`_` alphabet with `=` padding, no wrapping.
pub static URL_SAFE: Config = Config {
    char_set: CharacterSet::UrlSafe,
    pad: true,
    strip_whitespace: false,
    line_wrap: LineWrap::NoWrap,
};

/// URL-safe base64 without `=` padding, no wrapping.
pub static URL_SAFE_NO_PAD: Config = Config {
    char_set: CharacterSet::UrlSafe,
    pad: false,
    strip_whitespace: false,
    line_wrap: LineWrap::NoWrap,
};
/// Errors that can occur while decoding.
#[derive(Debug, PartialEq, Eq)]
pub enum DecodeError {
    /// An invalid byte was found in the input. The first field is the byte's
    /// offset in the input; the second is the offending byte itself.
    InvalidByte(usize, u8),
    /// The input length is invalid: valid base64 can never leave a lone
    /// trailing 6-bit group.
    InvalidLength,
}
impl fmt::Display for DecodeError {
    /// Renders a human-readable description of the decode failure.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            &DecodeError::InvalidByte(index, byte) => {
                write!(f, "Invalid byte {}, offset {}.", byte, index)
            }
            &DecodeError::InvalidLength => {
                write!(f, "Encoded text cannot have a 6-bit remainder.")
            }
        }
    }
}
impl error::Error for DecodeError {
    /// Short static description of the error kind.
    fn description(&self) -> &str {
        match self {
            &DecodeError::InvalidByte(..) => "invalid byte",
            &DecodeError::InvalidLength => "invalid length",
        }
    }

    /// Decode errors are root causes; there is nothing to chain to.
    fn cause(&self) -> Option<&error::Error> {
        None
    }
}
///Encode arbitrary octets as base64.
///Returns a String.
///Convenience for `encode_config(input, base64::STANDARD);`
///(standard alphabet with `=` padding).
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
///    let b64 = base64::encode(b"hello world");
///    println!("{}", b64);
///}
///```
pub fn encode<T: ?Sized + AsRef<[u8]>>(input: &T) -> String {
    encode_config(input, STANDARD)
}
///Decode from string reference as octets.
///Returns a Result containing a Vec<u8>, or a DecodeError on invalid input.
///Convenience for `decode_config(input, base64::STANDARD);`.
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
///    let bytes = base64::decode("aGVsbG8gd29ybGQ=").unwrap();
///    println!("{:?}", bytes);
///}
///```
pub fn decode<T: ?Sized + AsRef<[u8]>>(input: &T) -> Result<Vec<u8>, DecodeError> {
    decode_config(input, STANDARD)
}
///Encode arbitrary octets as base64.
///Returns a String.
///
///# Panics
///
///Panics if the encoded size calculation overflows `usize`.
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
///    let b64 = base64::encode_config(b"hello world~", base64::STANDARD);
///    println!("{}", b64);
///
///    let b64_url = base64::encode_config(b"hello internet~", base64::URL_SAFE);
///    println!("{}", b64_url);
///}
///```
pub fn encode_config<T: ?Sized + AsRef<[u8]>>(input: &T, config: Config) -> String {
    // Size the output up front; encoded_size returns None on usize overflow.
    let encoded_len = match encoded_size(input.as_ref().len(), config) {
        Some(len) => len,
        None => panic!("integer overflow when calculating buffer size"),
    };

    let mut encoded = String::with_capacity(encoded_len);
    encode_config_buf(input, config, &mut encoded);
    encoded
}
/// calculate the base64 encoded string size, including padding and line
/// endings. Returns `None` if any intermediate computation overflows `usize`.
fn encoded_size(bytes_len: usize, config: Config) -> Option<usize> {
    // 4 output chars per 3 input bytes, rounded up to a whole quad:
    // (len + 2) / 3 * 4, with every step overflow-checked.
    let printing_output_chars = bytes_len
        .checked_add(2)
        .map(|x| x / 3)
        .and_then(|x| x.checked_mul(4));

    //TODO this is subtly wrong but in a not dangerous way
    //pushing patch with identical to previous behavior, then fixing
    // (output that is an exact multiple of the wrap width still counts a
    // trailing line ending — see the in-file tests that pin this behavior)
    let line_ending_output_chars = match config.line_wrap {
        LineWrap::NoWrap => Some(0),
        // CRLF contributes 2 bytes per wrapped line, LF contributes 1.
        LineWrap::Wrap(n, LineEnding::CRLF) =>
            printing_output_chars.map(|y| y / n).and_then(|y| y.checked_mul(2)),
        LineWrap::Wrap(n, LineEnding::LF) =>
            printing_output_chars.map(|y| y / n),
    };

    // None from either part propagates out as None.
    printing_output_chars.and_then(|x|
        line_ending_output_chars.and_then(|y| x.checked_add(y))
    )
}
///Encode arbitrary octets as base64.
///Writes into the supplied buffer to avoid allocations.
///
///# Panics
///
///Panics if the encoded size calculation overflows `usize`.
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
///    let mut buf = String::new();
///    base64::encode_config_buf(b"hello world~", base64::STANDARD, &mut buf);
///    println!("{}", buf);
///
///    buf.clear();
///    base64::encode_config_buf(b"hello internet~", base64::URL_SAFE, &mut buf);
///    println!("{}", buf);
///}
///```
pub fn encode_config_buf<T: ?Sized + AsRef<[u8]>>(input: &T, config: Config, buf: &mut String) {
    let input_bytes = input.as_ref();
    let ref charset = match config.char_set {
        CharacterSet::Standard => tables::STANDARD_ENCODE,
        CharacterSet::UrlSafe => tables::URL_SAFE_ENCODE,
    };

    // reserve to make sure the memory we'll be writing to with unsafe is allocated
    let resv_size = match encoded_size(input_bytes.len(), config) {
        Some(n) => n,
        None => panic!("integer overflow when calculating buffer size"),
    };
    buf.reserve(resv_size);

    let orig_buf_len = buf.len();
    let mut fast_loop_output_buf_len = orig_buf_len;

    // The fast loop reads 8 input bytes per iteration but only consumes 6
    // of them (producing 8 output chars); hence it stops 8 bytes early.
    let input_chunk_len = 6;
    let last_fast_index = input_bytes.len().saturating_sub(8);

    // we're only going to insert valid utf8 (the encode tables only contain
    // ASCII), so the String invariant is preserved.
    let mut raw = unsafe { buf.as_mut_vec() };
    // start at the first free part of the output buf; the reserve() above
    // guarantees this memory is allocated.
    let mut output_ptr = unsafe { raw.as_mut_ptr().offset(orig_buf_len as isize) };
    let mut input_index: usize = 0;

    if input_bytes.len() >= 8 {
        while input_index <= last_fast_index {
            let input_chunk = BigEndian::read_u64(&input_bytes[input_index..(input_index + 8)]);

            // strip off 6 bits at a time for the first 6 bytes
            unsafe {
                std::ptr::write(output_ptr, charset[((input_chunk >> 58) & 0x3F) as usize]);
                std::ptr::write(output_ptr.offset(1), charset[((input_chunk >> 52) & 0x3F) as usize]);
                std::ptr::write(output_ptr.offset(2), charset[((input_chunk >> 46) & 0x3F) as usize]);
                std::ptr::write(output_ptr.offset(3), charset[((input_chunk >> 40) & 0x3F) as usize]);
                std::ptr::write(output_ptr.offset(4), charset[((input_chunk >> 34) & 0x3F) as usize]);
                std::ptr::write(output_ptr.offset(5), charset[((input_chunk >> 28) & 0x3F) as usize]);
                std::ptr::write(output_ptr.offset(6), charset[((input_chunk >> 22) & 0x3F) as usize]);
                std::ptr::write(output_ptr.offset(7), charset[((input_chunk >> 16) & 0x3F) as usize]);
                output_ptr = output_ptr.offset(8);
            }

            input_index += input_chunk_len;
            fast_loop_output_buf_len += 8;
        }
    }

    unsafe {
        // expand len to include the bytes we just wrote
        raw.set_len(fast_loop_output_buf_len);
    }

    // encode the 0 to 7 bytes left after the fast loop

    let rem = input_bytes.len() % 3;
    let start_of_rem = input_bytes.len() - rem;

    // start at the first index not handled by fast loop, which may be 0.
    // Handle whole 3-byte groups first.
    let mut leftover_index = input_index;

    while leftover_index < start_of_rem {
        raw.push(charset[(input_bytes[leftover_index] >> 2) as usize]);
        raw.push(charset[((input_bytes[leftover_index] << 4 | input_bytes[leftover_index + 1] >> 4) & 0x3f) as usize]);
        raw.push(charset[((input_bytes[leftover_index + 1] << 2 | input_bytes[leftover_index + 2] >> 6) & 0x3f) as usize]);
        raw.push(charset[(input_bytes[leftover_index + 2] & 0x3f) as usize]);

        leftover_index += 3;
    }

    // Then the final partial group of 1 or 2 bytes, if any.
    if rem == 2 {
        raw.push(charset[(input_bytes[start_of_rem] >> 2) as usize]);
        raw.push(charset[((input_bytes[start_of_rem] << 4 | input_bytes[start_of_rem + 1] >> 4) & 0x3f) as usize]);
        raw.push(charset[(input_bytes[start_of_rem + 1] << 2 & 0x3f) as usize]);
    } else if rem == 1 {
        raw.push(charset[(input_bytes[start_of_rem] >> 2) as usize]);
        raw.push(charset[(input_bytes[start_of_rem] << 4 & 0x3f) as usize]);
    }

    // '=' padding to round the final quad out to 4 chars.
    if config.pad {
        for _ in 0..((3 - rem) % 3) {
            raw.push(0x3d);
        }
    }

    //TODO FIXME this does the wrong thing for nonempty buffers
    // (wrapping is silently skipped when appending to a pre-filled buffer)
    if orig_buf_len == 0 {
        if let LineWrap::Wrap(line_size, line_end) = config.line_wrap {
            let len = raw.len();
            let mut i = 0;
            let mut j = 0;

            // Insert a line ending before every line_size-th encoded char.
            // i tracks position in the original output, j in the grown one.
            while i < len {
                if i > 0 && i % line_size == 0 {
                    match line_end {
                        LineEnding::LF => { raw.insert(j, b'\n'); j += 1; }
                        LineEnding::CRLF => { raw.insert(j, b'\r'); raw.insert(j + 1, b'\n'); j += 2; }
                    }
                }

                i += 1;
                j += 1;
            }
        }
    }
}
///Decode from string reference as octets.
///Returns a Result containing a Vec<u8>.
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
/// let bytes = base64::decode_config("aGVsbG8gd29ybGR+Cg==", base64::STANDARD).unwrap();
/// println!("{:?}", bytes);
///
/// let bytes_url = base64::decode_config("aGVsbG8gaW50ZXJuZXR-Cg==", base64::URL_SAFE).unwrap();
/// println!("{:?}", bytes_url);
///}
///```
pub fn decode_config<T: ?Sized + AsRef<[u8]>>(input: &T, config: Config) -> Result<Vec<u8>, DecodeError> {
    // Decoded output is at most 3 bytes for every 4 input bytes. The previous
    // hint of `len * 4 / 3` used the *encode* ratio, over-allocating by ~78%
    // and overflowing for inputs longer than usize::MAX / 4. Use the same
    // 3/4 estimate that decode_config_buf's `reserve` uses.
    let mut buffer = Vec::<u8>::with_capacity(input.as_ref().len() * 3 / 4);
    decode_config_buf(input, config, &mut buffer).map(|_| buffer)
}
///Decode from string reference as octets.
///Writes into the supplied buffer to avoid allocation.
///Returns a Result containing an empty tuple, aka ().
///
///# Example
///
///```rust
///extern crate base64;
///
///fn main() {
/// let mut buffer = Vec::<u8>::new();
/// base64::decode_config_buf("aGVsbG8gd29ybGR+Cg==", base64::STANDARD, &mut buffer).unwrap();
/// println!("{:?}", buffer);
///
/// buffer.clear();
///
/// base64::decode_config_buf("aGVsbG8gaW50ZXJuZXR-Cg==", base64::URL_SAFE, &mut buffer).unwrap();
/// println!("{:?}", buffer);
///}
///```
pub fn decode_config_buf<T: ?Sized + AsRef<[u8]>>(input: &T,
                                                  config: Config,
                                                  buffer: &mut Vec<u8>)
                                                  -> Result<(), DecodeError> {
    // Optionally strip ASCII whitespace first (costs one allocation + copy).
    let mut input_copy;
    let input_bytes = if config.strip_whitespace {
        input_copy = Vec::<u8>::with_capacity(input.as_ref().len());
        input_copy.extend(input.as_ref().iter().filter(|b| !b" \n\t\r\x0b\x0c".contains(b)));
        input_copy.as_ref()
    } else {
        input.as_ref()
    };

    let ref decode_table = match config.char_set {
        CharacterSet::Standard => tables::STANDARD_DECODE,
        CharacterSet::UrlSafe => tables::URL_SAFE_DECODE,
    };

    // Decoded output is at most 3 bytes per 4 input bytes.
    buffer.reserve(input_bytes.len() * 3 / 4);

    // the fast loop only handles complete chunks of 8 input bytes without padding
    let chunk_len = 8;
    let decoded_chunk_len = 6;
    let remainder_len = input_bytes.len() % chunk_len;
    let trailing_bytes_to_skip = if remainder_len == 0 {
        // if input is a multiple of the chunk size, ignore the last chunk as it may have padding
        chunk_len
    } else {
        remainder_len
    };

    let length_of_full_chunks = input_bytes.len().saturating_sub(trailing_bytes_to_skip);

    let starting_output_index = buffer.len();
    // Resize to hold decoded output from fast loop. Need the extra two bytes because
    // we write a full 8 bytes for the last 6-byte decoded chunk and then truncate off two
    let new_size = starting_output_index
        + length_of_full_chunks / chunk_len * decoded_chunk_len
        + (chunk_len - decoded_chunk_len);
    buffer.resize(new_size, 0);

    let mut output_index = starting_output_index;

    {
        let buffer_slice = buffer.as_mut_slice();

        let mut input_index = 0;
        // initial value is never used; always set if fast loop breaks
        let mut bad_byte_index: usize = 0;
        // a non-invalid value means it's not an error if fast loop never runs
        let mut morsel: u8 = 0;

        // fast loop of 8 bytes at a time.
        // Each input byte is mapped through the decode table to a 6-bit "morsel";
        // the 8 morsels are packed into bits 58..16 of `accum` and written out
        // big-endian, of which only the first 6 bytes are decoded output.
        while input_index < length_of_full_chunks {
            let mut accum: u64;

            let input_chunk = BigEndian::read_u64(&input_bytes[input_index..(input_index + 8)]);
            morsel = decode_table[(input_chunk >> 56) as usize];
            if morsel == tables::INVALID_VALUE {
                bad_byte_index = input_index;
                break;
            };
            accum = (morsel as u64) << 58;

            morsel = decode_table[(input_chunk >> 48 & 0xFF) as usize];
            if morsel == tables::INVALID_VALUE {
                bad_byte_index = input_index + 1;
                break;
            };
            accum |= (morsel as u64) << 52;

            morsel = decode_table[(input_chunk >> 40 & 0xFF) as usize];
            if morsel == tables::INVALID_VALUE {
                bad_byte_index = input_index + 2;
                break;
            };
            accum |= (morsel as u64) << 46;

            morsel = decode_table[(input_chunk >> 32 & 0xFF) as usize];
            if morsel == tables::INVALID_VALUE {
                bad_byte_index = input_index + 3;
                break;
            };
            accum |= (morsel as u64) << 40;

            morsel = decode_table[(input_chunk >> 24 & 0xFF) as usize];
            if morsel == tables::INVALID_VALUE {
                bad_byte_index = input_index + 4;
                break;
            };
            accum |= (morsel as u64) << 34;

            morsel = decode_table[(input_chunk >> 16 & 0xFF) as usize];
            if morsel == tables::INVALID_VALUE {
                bad_byte_index = input_index + 5;
                break;
            };
            accum |= (morsel as u64) << 28;

            morsel = decode_table[(input_chunk >> 8 & 0xFF) as usize];
            if morsel == tables::INVALID_VALUE {
                bad_byte_index = input_index + 6;
                break;
            };
            accum |= (morsel as u64) << 22;

            morsel = decode_table[(input_chunk & 0xFF) as usize];
            if morsel == tables::INVALID_VALUE {
                bad_byte_index = input_index + 7;
                break;
            };
            accum |= (morsel as u64) << 16;

            BigEndian::write_u64(&mut buffer_slice[(output_index)..(output_index + 8)],
                                 accum);

            output_index += 6;
            input_index += chunk_len;
        };

        if morsel == tables::INVALID_VALUE {
            // we got here from a break
            return Err(DecodeError::InvalidByte(bad_byte_index, input_bytes[bad_byte_index]));
        }
    }

    // Truncate off the last two bytes from writing the last u64.
    // Unconditional because we added on the extra 2 bytes in the resize before the loop,
    // so it will never underflow.
    let new_len = buffer.len() - (chunk_len - decoded_chunk_len);
    buffer.truncate(new_len);

    // handle leftovers (at most 8 bytes, decoded to 6).
    // Use a u64 as a stack-resident 8 bytes buffer.
    let mut leftover_bits: u64 = 0;
    let mut morsels_in_leftover = 0;
    let mut padding_bytes = 0;
    let mut first_padding_index: usize = 0;
    for (i, b) in input_bytes[length_of_full_chunks..].iter().enumerate() {
        // '=' padding
        if *b == 0x3D {
            // There can be bad padding in a few ways:
            // 1 - Padding with non-padding characters after it
            // 2 - Padding after zero or one non-padding characters before it
            //     in the current quad.
            // 3 - More than two characters of padding. If 3 or 4 padding chars
            //     are in the same quad, that implies it will be caught by #2.
            //     If it spreads from one quad to another, it will be caught by
            //     #2 in the second quad.

            if i % 4 < 2 {
                // Check for case #2.
                // TODO InvalidPadding error
                return Err(DecodeError::InvalidByte(length_of_full_chunks + i, *b));
            };

            if padding_bytes == 0 {
                first_padding_index = i;
            };

            padding_bytes += 1;
            continue;
        };

        // Check for case #1.
        // To make '=' handling consistent with the main loop, don't allow
        // non-suffix '=' in trailing chunk either. Report error as first
        // erroneous padding.
        if padding_bytes > 0 {
            return Err(DecodeError::InvalidByte(
                length_of_full_chunks + first_padding_index, 0x3D));
        };

        // can use up to 8 * 6 = 48 bits of the u64, if last chunk has no padding.
        // To minimize shifts, pack the leftovers from left to right.
        let shift = 64 - (morsels_in_leftover + 1) * 6;
        // tables are all 256 elements, cannot overflow from a u8 index
        let morsel = decode_table[*b as usize];
        if morsel == tables::INVALID_VALUE {
            return Err(DecodeError::InvalidByte(length_of_full_chunks + i, *b));
        };

        leftover_bits |= (morsel as u64) << shift;
        morsels_in_leftover += 1;
    };

    // Map morsel count to the number of output *bits* that are complete bytes.
    // Counts of 1 and 5 (i.e. a lone trailing 6-bit morsel) can never occur in
    // valid base64 and are rejected.
    let leftover_bits_ready_to_append = match morsels_in_leftover {
        0 => 0,
        1 => return Err(DecodeError::InvalidLength),
        2 => 8,
        3 => 16,
        4 => 24,
        5 => return Err(DecodeError::InvalidLength),
        6 => 32,
        7 => 40,
        8 => 48,
        _ => panic!("Impossible: must only have 0 to 4 input bytes in last quad")
    };

    let mut leftover_bits_appended_to_buf = 0;
    while leftover_bits_appended_to_buf < leftover_bits_ready_to_append {
        // `as` simply truncates the higher bits, which is what we want here
        let selected_bits = (leftover_bits >> (56 - leftover_bits_appended_to_buf)) as u8;
        buffer.push(selected_bits);

        leftover_bits_appended_to_buf += 8;
    };

    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;

    // Every 3 input bytes produce 4 output chars, rounded up to a full quad
    // when padding is enabled (STANDARD pads).
    #[test]
    fn encoded_size_correct() {
        assert_eq!(Some(0), encoded_size(0, STANDARD));
        assert_eq!(Some(4), encoded_size(1, STANDARD));
        assert_eq!(Some(4), encoded_size(2, STANDARD));
        assert_eq!(Some(4), encoded_size(3, STANDARD));
        assert_eq!(Some(8), encoded_size(4, STANDARD));
        assert_eq!(Some(8), encoded_size(5, STANDARD));
        assert_eq!(Some(8), encoded_size(6, STANDARD));
        assert_eq!(Some(12), encoded_size(7, STANDARD));
        assert_eq!(Some(12), encoded_size(8, STANDARD));
        assert_eq!(Some(12), encoded_size(9, STANDARD));
        assert_eq!(Some(72), encoded_size(54, STANDARD));
        assert_eq!(Some(76), encoded_size(55, STANDARD));
        assert_eq!(Some(76), encoded_size(56, STANDARD));
        assert_eq!(Some(76), encoded_size(57, STANDARD));
        assert_eq!(Some(80), encoded_size(58, STANDARD));
    }

    // MIME wraps at 76 chars with CRLF, adding 2 bytes per wrapped line.
    #[test]
    fn encoded_size_correct_mime() {
        assert_eq!(Some(0), encoded_size(0, MIME));
        assert_eq!(Some(4), encoded_size(1, MIME));
        assert_eq!(Some(4), encoded_size(2, MIME));
        assert_eq!(Some(4), encoded_size(3, MIME));
        assert_eq!(Some(8), encoded_size(4, MIME));
        assert_eq!(Some(8), encoded_size(5, MIME));
        assert_eq!(Some(8), encoded_size(6, MIME));
        assert_eq!(Some(12), encoded_size(7, MIME));
        assert_eq!(Some(12), encoded_size(8, MIME));
        assert_eq!(Some(12), encoded_size(9, MIME));
        assert_eq!(Some(72), encoded_size(54, MIME));
        assert_eq!(Some(78), encoded_size(55, MIME));
        assert_eq!(Some(78), encoded_size(56, MIME));
        assert_eq!(Some(78), encoded_size(57, MIME));
        assert_eq!(Some(82), encoded_size(58, MIME));
    }

    // LF wrapping adds only 1 byte per wrapped line.
    #[test]
    fn encoded_size_correct_lf() {
        let config = Config::new(
            CharacterSet::Standard,
            true,
            false,
            LineWrap::Wrap(76, LineEnding::LF)
        );
        assert_eq!(Some(0), encoded_size(0, config));
        assert_eq!(Some(4), encoded_size(1, config));
        assert_eq!(Some(4), encoded_size(2, config));
        assert_eq!(Some(4), encoded_size(3, config));
        assert_eq!(Some(8), encoded_size(4, config));
        assert_eq!(Some(8), encoded_size(5, config));
        assert_eq!(Some(8), encoded_size(6, config));
        assert_eq!(Some(12), encoded_size(7, config));
        assert_eq!(Some(12), encoded_size(8, config));
        assert_eq!(Some(12), encoded_size(9, config));
        assert_eq!(Some(72), encoded_size(54, config));
        assert_eq!(Some(77), encoded_size(55, config));
        assert_eq!(Some(77), encoded_size(56, config));
        assert_eq!(Some(77), encoded_size(57, config));
        assert_eq!(Some(81), encoded_size(58, config));
    }

    // Overflow in the size calculation must be reported as None, not wrap.
    #[test]
    fn encoded_size_overflow() {
        assert_eq!(None, encoded_size(std::usize::MAX, STANDARD));
    }
}
|
#![deny(missing_docs)]
//! A Glutin window back-end for the Piston game engine.
extern crate glutin;
extern crate gl;
extern crate input;
extern crate window;
extern crate shader_version;
use std::collections::VecDeque;
use std::error::Error;
// External crates.
use input::{
keyboard,
ButtonArgs,
ButtonState,
CloseArgs,
Event,
MouseButton,
Button,
Input,
FileDrag,
ResizeArgs,
};
use window::{
BuildFromWindowSettings,
OpenGLWindow,
Window,
AdvancedWindow,
ProcAddress,
WindowSettings,
Size,
Position,
Api,
UnsupportedGraphicsApiError,
};
use glutin::GlRequest;
use std::time::Duration;
use std::thread;
pub use shader_version::OpenGL;
use std::mem::MaybeUninit;
/// Contains stuff for game window.
pub struct GlutinWindow {
    /// The window.
    pub ctx: glutin::ContextWrapper<glutin::PossiblyCurrent, glutin::Window>,
    // The back-end does not remember the title.
    title: String,
    // Whether pressing Esc should close the window.
    exit_on_esc: bool,
    // Set when the window has been asked to close.
    should_close: bool,
    // Whether a close request from the OS closes the window automatically.
    automatic_close: bool,
    // Used to fake capturing of cursor,
    // to get relative mouse events.
    is_capturing_cursor: bool,
    // Stores the last known cursor position.
    last_cursor_pos: Option<[f64; 2]>,
    // Stores relative coordinates to emit on next poll.
    mouse_relative: Option<(f64, f64)>,
    // Used to emit cursor event after enter/leave.
    cursor_pos: Option<[f64; 2]>,
    // Polls events from window.
    events_loop: glutin::EventsLoop,
    // Stores list of events ready for processing.
    events: VecDeque<glutin::Event>,
}
// Translates Piston window settings into a glutin window builder.
fn window_builder_from_settings(settings: &WindowSettings) -> glutin::WindowBuilder {
    let size = settings.get_size();
    let mut builder = glutin::WindowBuilder::new()
        .with_dimensions((size.width, size.height).into())
        .with_decorations(settings.get_decorated())
        .with_multitouch()
        .with_title(settings.get_title())
        .with_resizable(settings.get_resizable());
    if settings.get_fullscreen() {
        // NOTE(review): a throwaway events loop is created here just to query
        // the primary monitor; it is separate from the loop the window will
        // actually run on — confirm this is valid per glutin's monitor-handle
        // semantics.
        let events_loop = glutin::EventsLoop::new();
        builder = builder.with_fullscreen(Some(events_loop.get_primary_monitor()));
    }
    builder
}
/// Builds a glutin context builder from the given window settings.
///
/// Returns an error if the settings request a graphics API other than OpenGL.
fn context_builder_from_settings(
    settings: &WindowSettings
) -> Result<glutin::ContextBuilder<glutin::NotCurrent>, Box<dyn Error>> {
    // Default to OpenGL 3.2 when no API is requested explicitly.
    let api = settings.get_maybe_graphics_api().unwrap_or(Api::opengl(3, 2));
    if api.api != "OpenGL" {
        // This back-end only supports OpenGL (with a GLES fallback).
        return Err(UnsupportedGraphicsApiError {
            found: api.api,
            expected: vec!["OpenGL".into()]
        }.into());
    }

    // Request the same version for both GL and GLES fallbacks.
    let gl_request = GlRequest::GlThenGles {
        opengl_version: (api.major as u8, api.minor as u8),
        opengles_version: (api.major as u8, api.minor as u8),
    };
    let mut context = glutin::ContextBuilder::new()
        .with_gl(gl_request)
        .with_srgb(settings.get_srgb());

    let sample_count = settings.get_samples();
    if settings.get_vsync() {
        context = context.with_vsync(true);
    }
    if sample_count != 0 {
        context = context.with_multisampling(sample_count as u16);
    }
    Ok(context)
}
impl GlutinWindow {
    /// Creates a new game window for Glutin.
    ///
    /// If context creation fails, retries once with multisampling disabled.
    /// Returns an error if the context still cannot be created or made current.
    pub fn new(settings: &WindowSettings) -> Result<Self, Box<dyn Error>> {
        let events_loop = glutin::EventsLoop::new();
        let title = settings.get_title();
        let exit_on_esc = settings.get_exit_on_esc();
        let window_builder = window_builder_from_settings(&settings);
        let context_builder = context_builder_from_settings(&settings)?;
        let ctx = context_builder.build_windowed(window_builder, &events_loop);
        let ctx = match ctx {
            Ok(ctx) => ctx,
            Err(_) => {
                // Retry with zero samples; the original error is discarded and
                // only the retry's error (if any) is propagated.
                let settings = settings.clone().samples(0);
                let window_builder = window_builder_from_settings(&settings);
                let context_builder = context_builder_from_settings(&settings)?;
                let ctx = context_builder.build_windowed(window_builder, &events_loop)?;
                ctx
            }
        };
        // make_current's error is a (context, error) tuple; keep only the error.
        let ctx = unsafe { ctx.make_current().map_err(|(_, err)| err)? };

        // Load the OpenGL function pointers.
        gl::load_with(|s| ctx.get_proc_address(s) as *const _);

        Ok(GlutinWindow {
            ctx,
            title,
            exit_on_esc,
            should_close: false,
            automatic_close: settings.get_automatic_close(),
            cursor_pos: None,
            is_capturing_cursor: false,
            last_cursor_pos: None,
            mouse_relative: None,
            events_loop,
            events: VecDeque::new(),
        })
    }
    /// Creates a game window from a pre-existing Glutin event loop and window builder.
    ///
    /// Unlike `new`, this does not retry with multisampling disabled on failure.
    pub fn from_raw(settings: &WindowSettings, events_loop: glutin::EventsLoop, window_builder: glutin::WindowBuilder) -> Result<Self, Box<dyn Error>> {
        let title = settings.get_title();
        let exit_on_esc = settings.get_exit_on_esc();
        let context_builder = context_builder_from_settings(&settings)?;
        let ctx = context_builder.build_windowed(window_builder, &events_loop)?;
        // make_current's error is a (context, error) tuple; keep only the error.
        let ctx = unsafe { ctx.make_current().map_err(|(_, err)| err)? };

        // Load the OpenGL function pointers.
        gl::load_with(|s| ctx.get_proc_address(s) as *const _);

        Ok(GlutinWindow {
            ctx,
            title,
            exit_on_esc,
            should_close: false,
            automatic_close: settings.get_automatic_close(),
            cursor_pos: None,
            is_capturing_cursor: false,
            last_cursor_pos: None,
            mouse_relative: None,
            events_loop,
            events: VecDeque::new(),
        })
    }
    // Blocks until a Piston event is available.
    fn wait_event(&mut self) -> Event {
        // First check for and handle any pending events.
        if let Some(event) = self.poll_event() {
            return event;
        }
        // Block on the events loop until something arrives that poll_event
        // translates into a Piston event; unknown events are skipped by
        // poll_event, so a single wakeup may not be enough — hence the loop.
        loop {
            {
                let ref mut events = self.events;
                self.events_loop.run_forever(|ev| {
                    events.push_back(ev);
                    // Stop the blocking loop after one event is queued.
                    glutin::ControlFlow::Break
                });
            }
            if let Some(event) = self.poll_event() {
                return event;
            }
        }
    }
    // Blocks for at most `timeout` waiting for a Piston event;
    // returns None if none became available.
    fn wait_event_timeout(&mut self, timeout: Duration) -> Option<Event> {
        // First check for and handle any pending events.
        if let Some(event) = self.poll_event() {
            return Some(event);
        }
        // Schedule wake up when time is out, via a helper thread that pokes
        // the events loop proxy after sleeping.
        let events_loop_proxy = self.events_loop.create_proxy();
        thread::spawn(move || {
            thread::sleep(timeout);
            // Wakeup can fail only if the event loop went away.
            events_loop_proxy.wakeup().ok();
        });
        {
            let ref mut events = self.events;
            // Blocks until either a real event or the timeout wakeup arrives.
            self.events_loop.run_forever(|ev| {
                events.push_back(ev);
                glutin::ControlFlow::Break
            });
        }
        self.poll_event()
    }
    // Non-blocking poll: returns the next translated Piston event, if any.
    fn poll_event(&mut self) -> Option<Event> {
        use glutin::Event as E;
        use glutin::WindowEvent as WE;

        // Loop to skip unknown events.
        loop {
            // Synthetic events (pending cursor/relative moves) take priority
            // over the glutin queue.
            let event = self.pre_pop_front_event();
            if event.is_some() {return event.map(|x| Event::Input(x, None));}

            if self.events.len() == 0 {
                let ref mut events = self.events;
                self.events_loop.poll_events(|ev| events.push_back(ev));
            }
            let mut ev = self.events.pop_front();

            // While capturing the cursor with no known position yet, the first
            // CursorMoved event only seeds last_cursor_pos and is swallowed.
            if self.is_capturing_cursor &&
               self.last_cursor_pos.is_none() {
                if let Some(E::WindowEvent {
                    event: WE::CursorMoved{ position, ..}, ..
                }) = ev {
                    // Ignore this event since mouse positions
                    // should not be emitted when capturing cursor.
                    self.last_cursor_pos = Some([position.x, position.y]);

                    if self.events.len() == 0 {
                        let ref mut events = self.events;
                        self.events_loop.poll_events(|ev| events.push_back(ev));
                    }
                    ev = self.events.pop_front();
                }
            }

            let mut unknown = false;
            let event = self.handle_event(ev, &mut unknown);
            if unknown {continue};
            return event.map(|x| Event::Input(x, None));
        }
    }
// These events are emitted before popping a new event from the queue.
// This is because Piston handles some events separately.
fn pre_pop_front_event(&mut self) -> Option<Input> {
    use input::Motion;
    // Check for a pending mouse cursor move event.
    if let Some(pos) = self.cursor_pos {
        // Take the value so the event is emitted exactly once.
        self.cursor_pos = None;
        return Some(Input::Move(Motion::MouseCursor(pos)));
    }
    // Check for a pending relative mouse move event.
    if let Some((x, y)) = self.mouse_relative {
        self.mouse_relative = None;
        return Some(Input::Move(Motion::MouseRelative([x, y])));
    }
    None
}
/// Convert an incoming Glutin event to Piston input.
/// Update cursor state if necessary.
///
/// The `unknown` flag is set to `true` when the event is not recognized.
/// This is used to poll another event to make the event loop logic sound.
/// When `unknown` is `true`, the return value is `None`.
fn handle_event(&mut self, ev: Option<glutin::Event>, unknown: &mut bool) -> Option<Input> {
    use glutin::Event as E;
    use glutin::WindowEvent as WE;
    use glutin::MouseScrollDelta;
    use input::{ Key, Motion };
    match ev {
        None => {
            // No event this round; keep the faked cursor capture centered.
            if self.is_capturing_cursor {
                self.fake_capture();
            }
            None
        }
        Some(E::WindowEvent {
            event: WE::Resized(size), ..
        }) => {
            let draw_size = self.draw_size();
            Some(Input::Resize(ResizeArgs {
                window_size: [size.width, size.height],
                draw_size: draw_size.into(),
            }))
        },
        Some(E::WindowEvent {
            event: WE::ReceivedCharacter(ch), ..
        }) => {
            let string = match ch {
                // Ignore control characters and return ascii for Text event (like sdl2).
                '\u{7f}' | // Delete
                '\u{1b}' | // Escape
                '\u{8}'  | // Backspace
                '\r' | '\n' | '\t' => "".to_string(),
                _ => ch.to_string()
            };
            Some(Input::Text(string))
        },
        Some(E::WindowEvent {
            event: WE::Focused(focused), ..
        }) =>
            Some(Input::Focus(focused)),
        Some(E::WindowEvent {
            event: WE::KeyboardInput{
                input: glutin::KeyboardInput{
                    state: glutin::ElementState::Pressed,
                    virtual_keycode: Some(key), scancode, ..
                }, ..
            }, ..
        }) => {
            let piston_key = map_key(key);
            // Honor the exit-on-Escape window setting.
            if let (true, Key::Escape) = (self.exit_on_esc, piston_key) {
                self.should_close = true;
            }
            Some(Input::Button(ButtonArgs {
                state: ButtonState::Press,
                button: Button::Keyboard(piston_key),
                scancode: Some(scancode as i32),
            }))
        },
        Some(E::WindowEvent {
            event: WE::KeyboardInput{
                input: glutin::KeyboardInput{
                    state: glutin::ElementState::Released,
                    virtual_keycode: Some(key), scancode, ..
                }, ..
            }, ..
        }) =>
            Some(Input::Button(ButtonArgs {
                state: ButtonState::Release,
                button: Button::Keyboard(map_key(key)),
                scancode: Some(scancode as i32),
            })),
        Some(E::WindowEvent {
            event: WE::Touch(glutin::Touch { phase, location, id, .. }), ..
        }) => {
            use glutin::TouchPhase;
            use input::{Touch, TouchArgs};
            Some(Input::Move(Motion::Touch(TouchArgs::new(
                0, id as i64, [location.x, location.y], 1.0, match phase {
                    TouchPhase::Started => Touch::Start,
                    TouchPhase::Moved => Touch::Move,
                    TouchPhase::Ended => Touch::End,
                    TouchPhase::Cancelled => Touch::Cancel
                }
            ))))
        }
        Some(E::WindowEvent {
            event: WE::CursorMoved{position, ..}, ..
        }) => {
            let x = position.x;
            let y = position.y;
            if let Some(pos) = self.last_cursor_pos {
                // Derive relative motion from the previous known position.
                let dx = x - pos[0];
                let dy = y - pos[1];
                if self.is_capturing_cursor {
                    self.last_cursor_pos = Some([x, y]);
                    self.fake_capture();
                    // Skip normal mouse movement and emit relative motion only.
                    return Some(Input::Move(Motion::MouseRelative([dx as f64, dy as f64])));
                }
                // Send relative mouse movement next time.
                self.mouse_relative = Some((dx as f64, dy as f64));
            }
            self.last_cursor_pos = Some([x, y]);
            Some(Input::Move(Motion::MouseCursor([x, y])))
        }
        Some(E::WindowEvent {
            event: WE::CursorEntered{..}, ..
        }) => Some(Input::Cursor(true)),
        Some(E::WindowEvent {
            event: WE::CursorLeft{..}, ..
        }) => Some(Input::Cursor(false)),
        Some(E::WindowEvent {
            event: WE::MouseWheel{delta: MouseScrollDelta::PixelDelta(pos), ..}, ..
        }) => Some(Input::Move(Motion::MouseScroll([pos.x as f64, pos.y as f64]))),
        Some(E::WindowEvent {
            event: WE::MouseWheel{delta: MouseScrollDelta::LineDelta(x, y), ..}, ..
        }) => Some(Input::Move(Motion::MouseScroll([x as f64, y as f64]))),
        Some(E::WindowEvent {
            event: WE::MouseInput{state: glutin::ElementState::Pressed, button, ..}, ..
        }) => Some(Input::Button(ButtonArgs {
            state: ButtonState::Press,
            button: Button::Mouse(map_mouse(button)),
            scancode: None,
        })),
        Some(E::WindowEvent {
            event: WE::MouseInput{state: glutin::ElementState::Released, button, ..}, ..
        }) => Some(Input::Button(ButtonArgs {
            state: ButtonState::Release,
            button: Button::Mouse(map_mouse(button)),
            scancode: None,
        })),
        Some(E::WindowEvent {
            event: WE::HoveredFile(path), ..
        }) => Some(Input::FileDrag(FileDrag::Hover(path))),
        Some(E::WindowEvent {
            event: WE::DroppedFile(path), ..
        }) => Some(Input::FileDrag(FileDrag::Drop(path))),
        Some(E::WindowEvent {
            event: WE::HoveredFileCancelled, ..
        }) => Some(Input::FileDrag(FileDrag::Cancel)),
        Some(E::WindowEvent { event: WE::CloseRequested, .. }) => {
            // Close only when the user opted into automatic closing;
            // the Close event is emitted either way.
            if self.automatic_close {
                self.should_close = true;
            }
            Some(Input::Close(CloseArgs))
        }
        _ => {
            // Anything else (device events etc.) is unknown to Piston.
            *unknown = true;
            None
        }
    }
}
/// Re-centers the (hidden) cursor to emulate cursor capture,
/// keeping `last_cursor_pos` in sync with the warp.
fn fake_capture(&mut self) {
    if let Some(pos) = self.last_cursor_pos {
        // Fake capturing of cursor.
        let size = self.size();
        let cx = size.width / 2.0;
        let cy = size.height / 2.0;
        let dx = cx - pos[0];
        let dy = cy - pos[1];
        if dx != 0.0 || dy != 0.0 {
            // Only update the cached position if the warp actually succeeded.
            if let Ok(_) = self.ctx.window().set_cursor_position((cx, cy).into()) {
                self.last_cursor_pos = Some([cx, cy]);
            }
        }
    }
}
}
impl Window for GlutinWindow {
    fn size(&self) -> Size {
        // Logical (DPI-scaled) window size; zero if the window is gone.
        let size = self.ctx.window().get_inner_size().unwrap_or((0.0, 0.0).into());
        (size.width, size.height).into()
    }
    fn draw_size(&self) -> Size {
        // Physical pixel size of the drawable, scaled by the HiDPI factor.
        let size = self.ctx.window()
            .get_inner_size()
            .unwrap_or((0.0, 0.0).into())
            .to_physical(self.ctx.window().get_hidpi_factor());
        (size.width, size.height).into()
    }
    fn should_close(&self) -> bool { self.should_close }
    fn set_should_close(&mut self, value: bool) { self.should_close = value; }
    // Swap errors are intentionally ignored (best effort).
    fn swap_buffers(&mut self) { let _ = self.ctx.swap_buffers(); }
    fn wait_event(&mut self) -> Event { self.wait_event() }
    fn wait_event_timeout(&mut self, timeout: Duration) -> Option<Event> {
        self.wait_event_timeout(timeout)
    }
    fn poll_event(&mut self) -> Option<Event> { self.poll_event() }
}
impl BuildFromWindowSettings for GlutinWindow {
    // Delegates to `GlutinWindow::new`, which also retries without
    // multisampling on failure.
    fn build_from_window_settings(settings: &WindowSettings)
    -> Result<Self, Box<dyn Error>> {
        GlutinWindow::new(settings)
    }
}
impl AdvancedWindow for GlutinWindow {
    // The back-end caches the title itself because Glutin cannot read it back.
    fn get_title(&self) -> String { self.title.clone() }
    fn set_title(&mut self, value: String) {
        self.title = value;
        self.ctx.window().set_title(&self.title);
    }
    fn get_exit_on_esc(&self) -> bool { self.exit_on_esc }
    fn set_exit_on_esc(&mut self, value: bool) { self.exit_on_esc = value; }
    fn get_automatic_close(&self) -> bool { self.automatic_close }
    fn set_automatic_close(&mut self, value: bool) { self.automatic_close = value; }
    fn set_capture_cursor(&mut self, value: bool) {
        // Normally we would call `.grab_cursor(true)`
        // but since relative mouse events does not work,
        // the capturing of cursor is faked by hiding the cursor
        // and setting the position to the center of window.
        self.is_capturing_cursor = value;
        self.ctx.window().hide_cursor(value);
        if value {
            self.fake_capture();
        }
    }
    fn show(&mut self) { self.ctx.window().show(); }
    fn hide(&mut self) { self.ctx.window().hide(); }
    fn get_position(&self) -> Option<Position> {
        self.ctx.window().get_position().map(|pos|
            Position { x: pos.x as i32, y: pos.y as i32 })
    }
    fn set_position<P: Into<Position>>(&mut self, pos: P) {
        let pos: Position = pos.into();
        self.ctx.window().set_position((pos.x, pos.y).into());
    }
    fn set_size<S: Into<Size>>(&mut self, size: S) {
        let size: Size = size.into();
        self.ctx.window().set_inner_size((
            size.width as f64,
            size.height as f64,
        ).into());
    }
}
impl OpenGLWindow for GlutinWindow {
    fn get_proc_address(&mut self, proc_name: &str) -> ProcAddress {
        self.ctx.get_proc_address(proc_name) as *const _
    }
    fn is_current(&self) -> bool {
        self.ctx.is_current()
    }
    fn make_current(&mut self) {
        // `ContextWrapper::make_current` consumes the context by value, but we
        // only have `&mut self`. The previous implementation materialized a
        // zeroed `ContextWrapper` via `MaybeUninit::zeroed().assume_init()` as
        // a placeholder, which is instant undefined behavior for a type that
        // is not valid all-zeroes.
        //
        // Instead, move the context out bitwise with `ptr::read` and write the
        // returned context back with `ptr::write` on both the success and the
        // error path, so `self.ctx` is never observed in an invalid state and
        // nothing is dropped twice.
        unsafe {
            let ctx = std::ptr::read(&self.ctx);
            match ctx.make_current() {
                Ok(ctx) => {
                    std::ptr::write(&mut self.ctx, ctx);
                }
                Err((ctx, err)) => {
                    // `make_current` returns the (not-current) context on
                    // failure; restore it before reporting the error.
                    std::ptr::write(&mut self.ctx, ctx);
                    panic!("Failed to make context current: {:?}", err);
                }
            }
        }
    }
}
/// Maps Glutin's key to Piston's key.
///
/// Keys without a Piston equivalent fall through to `Key::Unknown`.
pub fn map_key(keycode: glutin::VirtualKeyCode) -> keyboard::Key {
    use input::keyboard::Key;
    use glutin::VirtualKeyCode as K;
    match keycode {
        K::Key0 => Key::D0,
        K::Key1 => Key::D1,
        K::Key2 => Key::D2,
        K::Key3 => Key::D3,
        K::Key4 => Key::D4,
        K::Key5 => Key::D5,
        K::Key6 => Key::D6,
        K::Key7 => Key::D7,
        K::Key8 => Key::D8,
        K::Key9 => Key::D9,
        K::A => Key::A,
        K::B => Key::B,
        K::C => Key::C,
        K::D => Key::D,
        K::E => Key::E,
        K::F => Key::F,
        K::G => Key::G,
        K::H => Key::H,
        K::I => Key::I,
        K::J => Key::J,
        K::K => Key::K,
        K::L => Key::L,
        K::M => Key::M,
        K::N => Key::N,
        K::O => Key::O,
        K::P => Key::P,
        K::Q => Key::Q,
        K::R => Key::R,
        K::S => Key::S,
        K::T => Key::T,
        K::U => Key::U,
        K::V => Key::V,
        K::W => Key::W,
        K::X => Key::X,
        K::Y => Key::Y,
        K::Z => Key::Z,
        K::Apostrophe => Key::Unknown,
        K::Backslash => Key::Backslash,
        K::Back => Key::Backspace,
        // K::CapsLock => Key::CapsLock,
        K::Delete => Key::Delete,
        K::Comma => Key::Comma,
        K::Down => Key::Down,
        K::End => Key::End,
        K::Return => Key::Return,
        K::Equals => Key::Equals,
        K::Escape => Key::Escape,
        K::F1 => Key::F1,
        K::F2 => Key::F2,
        K::F3 => Key::F3,
        K::F4 => Key::F4,
        K::F5 => Key::F5,
        K::F6 => Key::F6,
        K::F7 => Key::F7,
        K::F8 => Key::F8,
        K::F9 => Key::F9,
        K::F10 => Key::F10,
        K::F11 => Key::F11,
        K::F12 => Key::F12,
        K::F13 => Key::F13,
        K::F14 => Key::F14,
        K::F15 => Key::F15,
        K::F16 => Key::F16,
        K::F17 => Key::F17,
        K::F18 => Key::F18,
        K::F19 => Key::F19,
        K::F20 => Key::F20,
        K::F21 => Key::F21,
        K::F22 => Key::F22,
        K::F23 => Key::F23,
        K::F24 => Key::F24,
        // Possibly next code.
        // K::F25 => Key::Unknown,
        K::Numpad0 => Key::NumPad0,
        K::Numpad1 => Key::NumPad1,
        K::Numpad2 => Key::NumPad2,
        K::Numpad3 => Key::NumPad3,
        K::Numpad4 => Key::NumPad4,
        K::Numpad5 => Key::NumPad5,
        K::Numpad6 => Key::NumPad6,
        K::Numpad7 => Key::NumPad7,
        K::Numpad8 => Key::NumPad8,
        K::Numpad9 => Key::NumPad9,
        K::NumpadComma => Key::NumPadDecimal,
        K::Divide => Key::NumPadDivide,
        K::Multiply => Key::NumPadMultiply,
        K::Subtract => Key::NumPadMinus,
        K::Add => Key::NumPadPlus,
        K::NumpadEnter => Key::NumPadEnter,
        K::NumpadEquals => Key::NumPadEquals,
        K::LShift => Key::LShift,
        K::LControl => Key::LCtrl,
        K::LAlt => Key::LAlt,
        K::RShift => Key::RShift,
        K::RControl => Key::RCtrl,
        K::RAlt => Key::RAlt,
        // Map to backslash?
        // K::GraveAccent => Key::Unknown,
        K::Home => Key::Home,
        K::Insert => Key::Insert,
        K::Left => Key::Left,
        K::LBracket => Key::LeftBracket,
        // K::Menu => Key::Menu,
        K::Minus => Key::Minus,
        K::Numlock => Key::NumLockClear,
        K::PageDown => Key::PageDown,
        K::PageUp => Key::PageUp,
        K::Pause => Key::Pause,
        K::Period => Key::Period,
        K::Snapshot => Key::PrintScreen,
        K::Right => Key::Right,
        K::RBracket => Key::RightBracket,
        K::Scroll => Key::ScrollLock,
        K::Semicolon => Key::Semicolon,
        K::Slash => Key::Slash,
        K::Space => Key::Space,
        K::Tab => Key::Tab,
        K::Up => Key::Up,
        // K::World1 => Key::Unknown,
        // K::World2 => Key::Unknown,
        _ => Key::Unknown,
    }
}
/// Maps Glutin's mouse button to Piston's mouse button.
///
/// Extra buttons beyond the first five codes map to `MouseButton::Unknown`.
pub fn map_mouse(mouse_button: glutin::MouseButton) -> MouseButton {
    use glutin::MouseButton as M;
    match mouse_button {
        M::Left => MouseButton::Left,
        M::Middle => MouseButton::Middle,
        M::Right => MouseButton::Right,
        M::Other(code) => match code {
            0 => MouseButton::X1,
            1 => MouseButton::X2,
            2 => MouseButton::Button6,
            3 => MouseButton::Button7,
            4 => MouseButton::Button8,
            _ => MouseButton::Unknown,
        },
    }
}
Fix undefined behavior in `make_current`: move the context out with `ptr::read` and restore it with `ptr::write`, instead of materializing a zeroed dummy `ContextWrapper`.
#![deny(missing_docs)]
//! A Glutin window back-end for the Piston game engine.
extern crate glutin;
extern crate gl;
extern crate input;
extern crate window;
extern crate shader_version;
use std::collections::VecDeque;
use std::error::Error;
// External crates.
use input::{
keyboard,
ButtonArgs,
ButtonState,
CloseArgs,
Event,
MouseButton,
Button,
Input,
FileDrag,
ResizeArgs,
};
use window::{
BuildFromWindowSettings,
OpenGLWindow,
Window,
AdvancedWindow,
ProcAddress,
WindowSettings,
Size,
Position,
Api,
UnsupportedGraphicsApiError,
};
use glutin::GlRequest;
use std::time::Duration;
use std::thread;
pub use shader_version::OpenGL;
/// Contains stuff for game window.
pub struct GlutinWindow {
    /// The window, wrapped in its (current) OpenGL context.
    pub ctx: glutin::ContextWrapper<glutin::PossiblyCurrent, glutin::Window>,
    // The back-end does not remember the title,
    // so it is cached here for `get_title`.
    title: String,
    // Whether pressing Escape closes the window.
    exit_on_esc: bool,
    should_close: bool,
    // Whether a close request from the OS sets `should_close`.
    automatic_close: bool,
    // Used to fake capturing of cursor,
    // to get relative mouse events.
    is_capturing_cursor: bool,
    // Stores the last known cursor position.
    last_cursor_pos: Option<[f64; 2]>,
    // Stores relative coordinates to emit on next poll.
    mouse_relative: Option<(f64, f64)>,
    // Used to emit cursor event after enter/leave.
    cursor_pos: Option<[f64; 2]>,
    // Polls events from window.
    events_loop: glutin::EventsLoop,
    // Stores list of events ready for processing.
    events: VecDeque<glutin::Event>,
}
/// Builds a `glutin::WindowBuilder` from Piston window settings.
fn window_builder_from_settings(settings: &WindowSettings) -> glutin::WindowBuilder {
    let size = settings.get_size();
    let mut builder = glutin::WindowBuilder::new()
        .with_dimensions((size.width, size.height).into())
        .with_decorations(settings.get_decorated())
        .with_multitouch()
        .with_title(settings.get_title())
        .with_resizable(settings.get_resizable());
    if settings.get_fullscreen() {
        // NOTE(review): a throwaway events loop is created only to query the
        // primary monitor; the window itself is built on the caller's loop.
        let events_loop = glutin::EventsLoop::new();
        builder = builder.with_fullscreen(Some(events_loop.get_primary_monitor()));
    }
    builder
}
/// Builds a `glutin::ContextBuilder` from Piston window settings.
///
/// Returns an `UnsupportedGraphicsApiError` if the requested API
/// is anything other than OpenGL.
fn context_builder_from_settings(
    settings: &WindowSettings
) -> Result<glutin::ContextBuilder<glutin::NotCurrent>, Box<dyn Error>> {
    // Default to OpenGL 3.2 when no API was requested explicitly.
    let api = settings.get_maybe_graphics_api().unwrap_or(Api::opengl(3, 2));
    if api.api != "OpenGL" {
        return Err(UnsupportedGraphicsApiError {
            found: api.api,
            expected: vec!["OpenGL".into()]
        }.into());
    };
    // Request the same version for GL and GLES; Glutin falls back to GLES
    // when desktop GL is unavailable.
    let mut builder = glutin::ContextBuilder::new()
        .with_gl(GlRequest::GlThenGles {
            opengl_version: (api.major as u8, api.minor as u8),
            opengles_version: (api.major as u8, api.minor as u8),
        })
        .with_srgb(settings.get_srgb());
    let samples = settings.get_samples();
    if settings.get_vsync() {
        builder = builder.with_vsync(true);
    }
    if samples != 0 {
        builder = builder.with_multisampling(samples as u16);
    }
    Ok(builder)
}
impl GlutinWindow {
/// Creates a new game window for Glutin.
///
/// If context creation fails with the requested sample count, the
/// window is rebuilt once with multisampling disabled.
pub fn new(settings: &WindowSettings) -> Result<Self, Box<dyn Error>> {
    let events_loop = glutin::EventsLoop::new();
    let title = settings.get_title();
    let exit_on_esc = settings.get_exit_on_esc();
    let window_builder = window_builder_from_settings(&settings);
    let context_builder = context_builder_from_settings(&settings)?;
    let ctx = context_builder.build_windowed(window_builder, &events_loop);
    let ctx = match ctx {
        Ok(ctx) => ctx,
        Err(_) => {
            // Retry without multisampling; some drivers reject the
            // requested sample count.
            let settings = settings.clone().samples(0);
            let window_builder = window_builder_from_settings(&settings);
            let context_builder = context_builder_from_settings(&settings)?;
            let ctx = context_builder.build_windowed(window_builder, &events_loop)?;
            ctx
        }
    };
    // `make_current` hands back the context on failure; keep only the error.
    let ctx = unsafe { ctx.make_current().map_err(|(_, err)| err)? };
    // Load the OpenGL function pointers.
    gl::load_with(|s| ctx.get_proc_address(s) as *const _);
    Ok(GlutinWindow {
        ctx,
        title,
        exit_on_esc,
        should_close: false,
        automatic_close: settings.get_automatic_close(),
        cursor_pos: None,
        is_capturing_cursor: false,
        last_cursor_pos: None,
        mouse_relative: None,
        events_loop,
        events: VecDeque::new(),
    })
}
/// Creates a game window from a pre-existing Glutin event loop and window builder.
pub fn from_raw(settings: &WindowSettings, events_loop: glutin::EventsLoop, window_builder: glutin::WindowBuilder) -> Result<Self, Box<dyn Error>> {
let title = settings.get_title();
let exit_on_esc = settings.get_exit_on_esc();
let context_builder = context_builder_from_settings(&settings)?;
let ctx = context_builder.build_windowed(window_builder, &events_loop)?;
let ctx = unsafe { ctx.make_current().map_err(|(_, err)| err)? };
// Load the OpenGL function pointers.
gl::load_with(|s| ctx.get_proc_address(s) as *const _);
Ok(GlutinWindow {
ctx,
title,
exit_on_esc,
should_close: false,
automatic_close: settings.get_automatic_close(),
cursor_pos: None,
is_capturing_cursor: false,
last_cursor_pos: None,
mouse_relative: None,
events_loop,
events: VecDeque::new(),
})
}
fn wait_event(&mut self) -> Event {
// First check for and handle any pending events.
if let Some(event) = self.poll_event() {
return event;
}
loop {
{
let ref mut events = self.events;
self.events_loop.run_forever(|ev| {
events.push_back(ev);
glutin::ControlFlow::Break
});
}
if let Some(event) = self.poll_event() {
return event;
}
}
}
fn wait_event_timeout(&mut self, timeout: Duration) -> Option<Event> {
// First check for and handle any pending events.
if let Some(event) = self.poll_event() {
return Some(event);
}
// Schedule wake up when time is out.
let events_loop_proxy = self.events_loop.create_proxy();
thread::spawn(move || {
thread::sleep(timeout);
// Wakeup can fail only if the event loop went away.
events_loop_proxy.wakeup().ok();
});
{
let ref mut events = self.events;
self.events_loop.run_forever(|ev| {
events.push_back(ev);
glutin::ControlFlow::Break
});
}
self.poll_event()
}
fn poll_event(&mut self) -> Option<Event> {
use glutin::Event as E;
use glutin::WindowEvent as WE;
// Loop to skip unknown events.
loop {
let event = self.pre_pop_front_event();
if event.is_some() {return event.map(|x| Event::Input(x, None));}
if self.events.len() == 0 {
let ref mut events = self.events;
self.events_loop.poll_events(|ev| events.push_back(ev));
}
let mut ev = self.events.pop_front();
if self.is_capturing_cursor &&
self.last_cursor_pos.is_none() {
if let Some(E::WindowEvent {
event: WE::CursorMoved{ position, ..}, ..
}) = ev {
// Ignore this event since mouse positions
// should not be emitted when capturing cursor.
self.last_cursor_pos = Some([position.x, position.y]);
if self.events.len() == 0 {
let ref mut events = self.events;
self.events_loop.poll_events(|ev| events.push_back(ev));
}
ev = self.events.pop_front();
}
}
let mut unknown = false;
let event = self.handle_event(ev, &mut unknown);
if unknown {continue};
return event.map(|x| Event::Input(x, None));
}
}
// These events are emitted before popping a new event from the queue.
// This is because Piston handles some events separately.
fn pre_pop_front_event(&mut self) -> Option<Input> {
use input::Motion;
// Check for a pending mouse cursor move event.
if let Some(pos) = self.cursor_pos {
self.cursor_pos = None;
return Some(Input::Move(Motion::MouseCursor(pos)));
}
// Check for a pending relative mouse move event.
if let Some((x, y)) = self.mouse_relative {
self.mouse_relative = None;
return Some(Input::Move(Motion::MouseRelative([x, y])));
}
None
}
/// Convert an incoming Glutin event to Piston input.
/// Update cursor state if necessary.
///
/// The `unknown` flag is set to `true` when the event is not recognized.
/// This is used to poll another event to make the event loop logic sound.
/// When `unknown` is `true`, the return value is `None`.
fn handle_event(&mut self, ev: Option<glutin::Event>, unknown: &mut bool) -> Option<Input> {
use glutin::Event as E;
use glutin::WindowEvent as WE;
use glutin::MouseScrollDelta;
use input::{ Key, Motion };
match ev {
None => {
if self.is_capturing_cursor {
self.fake_capture();
}
None
}
Some(E::WindowEvent {
event: WE::Resized(size), ..
}) => {
let draw_size = self.draw_size();
Some(Input::Resize(ResizeArgs {
window_size: [size.width, size.height],
draw_size: draw_size.into(),
}))
},
Some(E::WindowEvent {
event: WE::ReceivedCharacter(ch), ..
}) => {
let string = match ch {
// Ignore control characters and return ascii for Text event (like sdl2).
'\u{7f}' | // Delete
'\u{1b}' | // Escape
'\u{8}' | // Backspace
'\r' | '\n' | '\t' => "".to_string(),
_ => ch.to_string()
};
Some(Input::Text(string))
},
Some(E::WindowEvent {
event: WE::Focused(focused), ..
}) =>
Some(Input::Focus(focused)),
Some(E::WindowEvent {
event: WE::KeyboardInput{
input: glutin::KeyboardInput{
state: glutin::ElementState::Pressed,
virtual_keycode: Some(key), scancode, ..
}, ..
}, ..
}) => {
let piston_key = map_key(key);
if let (true, Key::Escape) = (self.exit_on_esc, piston_key) {
self.should_close = true;
}
Some(Input::Button(ButtonArgs {
state: ButtonState::Press,
button: Button::Keyboard(piston_key),
scancode: Some(scancode as i32),
}))
},
Some(E::WindowEvent {
event: WE::KeyboardInput{
input: glutin::KeyboardInput{
state: glutin::ElementState::Released,
virtual_keycode: Some(key), scancode, ..
}, ..
}, ..
}) =>
Some(Input::Button(ButtonArgs {
state: ButtonState::Release,
button: Button::Keyboard(map_key(key)),
scancode: Some(scancode as i32),
})),
Some(E::WindowEvent {
event: WE::Touch(glutin::Touch { phase, location, id, .. }), ..
}) => {
use glutin::TouchPhase;
use input::{Touch, TouchArgs};
Some(Input::Move(Motion::Touch(TouchArgs::new(
0, id as i64, [location.x, location.y], 1.0, match phase {
TouchPhase::Started => Touch::Start,
TouchPhase::Moved => Touch::Move,
TouchPhase::Ended => Touch::End,
TouchPhase::Cancelled => Touch::Cancel
}
))))
}
Some(E::WindowEvent {
event: WE::CursorMoved{position, ..}, ..
}) => {
let x = position.x;
let y = position.y;
if let Some(pos) = self.last_cursor_pos {
let dx = x - pos[0];
let dy = y - pos[1];
if self.is_capturing_cursor {
self.last_cursor_pos = Some([x, y]);
self.fake_capture();
// Skip normal mouse movement and emit relative motion only.
return Some(Input::Move(Motion::MouseRelative([dx as f64, dy as f64])));
}
// Send relative mouse movement next time.
self.mouse_relative = Some((dx as f64, dy as f64));
}
self.last_cursor_pos = Some([x, y]);
Some(Input::Move(Motion::MouseCursor([x, y])))
}
Some(E::WindowEvent {
event: WE::CursorEntered{..}, ..
}) => Some(Input::Cursor(true)),
Some(E::WindowEvent {
event: WE::CursorLeft{..}, ..
}) => Some(Input::Cursor(false)),
Some(E::WindowEvent {
event: WE::MouseWheel{delta: MouseScrollDelta::PixelDelta(pos), ..}, ..
}) => Some(Input::Move(Motion::MouseScroll([pos.x as f64, pos.y as f64]))),
Some(E::WindowEvent {
event: WE::MouseWheel{delta: MouseScrollDelta::LineDelta(x, y), ..}, ..
}) => Some(Input::Move(Motion::MouseScroll([x as f64, y as f64]))),
Some(E::WindowEvent {
event: WE::MouseInput{state: glutin::ElementState::Pressed, button, ..}, ..
}) => Some(Input::Button(ButtonArgs {
state: ButtonState::Press,
button: Button::Mouse(map_mouse(button)),
scancode: None,
})),
Some(E::WindowEvent {
event: WE::MouseInput{state: glutin::ElementState::Released, button, ..}, ..
}) => Some(Input::Button(ButtonArgs {
state: ButtonState::Release,
button: Button::Mouse(map_mouse(button)),
scancode: None,
})),
Some(E::WindowEvent {
event: WE::HoveredFile(path), ..
}) => Some(Input::FileDrag(FileDrag::Hover(path))),
Some(E::WindowEvent {
event: WE::DroppedFile(path), ..
}) => Some(Input::FileDrag(FileDrag::Drop(path))),
Some(E::WindowEvent {
event: WE::HoveredFileCancelled, ..
}) => Some(Input::FileDrag(FileDrag::Cancel)),
Some(E::WindowEvent { event: WE::CloseRequested, .. }) => {
if self.automatic_close {
self.should_close = true;
}
Some(Input::Close(CloseArgs))
}
_ => {
*unknown = true;
None
}
}
}
fn fake_capture(&mut self) {
if let Some(pos) = self.last_cursor_pos {
// Fake capturing of cursor.
let size = self.size();
let cx = size.width / 2.0;
let cy = size.height / 2.0;
let dx = cx - pos[0];
let dy = cy - pos[1];
if dx != 0.0 || dy != 0.0 {
if let Ok(_) = self.ctx.window().set_cursor_position((cx, cy).into()) {
self.last_cursor_pos = Some([cx, cy]);
}
}
}
}
}
impl Window for GlutinWindow {
fn size(&self) -> Size {
let size = self.ctx.window().get_inner_size().unwrap_or((0.0, 0.0).into());
(size.width, size.height).into()
}
fn draw_size(&self) -> Size {
let size = self.ctx.window()
.get_inner_size()
.unwrap_or((0.0, 0.0).into())
.to_physical(self.ctx.window().get_hidpi_factor());
(size.width, size.height).into()
}
fn should_close(&self) -> bool { self.should_close }
fn set_should_close(&mut self, value: bool) { self.should_close = value; }
fn swap_buffers(&mut self) { let _ = self.ctx.swap_buffers(); }
fn wait_event(&mut self) -> Event { self.wait_event() }
fn wait_event_timeout(&mut self, timeout: Duration) -> Option<Event> {
self.wait_event_timeout(timeout)
}
fn poll_event(&mut self) -> Option<Event> { self.poll_event() }
}
impl BuildFromWindowSettings for GlutinWindow {
fn build_from_window_settings(settings: &WindowSettings)
-> Result<Self, Box<dyn Error>> {
GlutinWindow::new(settings)
}
}
impl AdvancedWindow for GlutinWindow {
fn get_title(&self) -> String { self.title.clone() }
fn set_title(&mut self, value: String) {
self.title = value;
self.ctx.window().set_title(&self.title);
}
fn get_exit_on_esc(&self) -> bool { self.exit_on_esc }
fn set_exit_on_esc(&mut self, value: bool) { self.exit_on_esc = value; }
fn get_automatic_close(&self) -> bool { self.automatic_close }
fn set_automatic_close(&mut self, value: bool) { self.automatic_close = value; }
fn set_capture_cursor(&mut self, value: bool) {
// Normally we would call `.grab_cursor(true)`
// but since relative mouse events does not work,
// the capturing of cursor is faked by hiding the cursor
// and setting the position to the center of window.
self.is_capturing_cursor = value;
self.ctx.window().hide_cursor(value);
if value {
self.fake_capture();
}
}
fn show(&mut self) { self.ctx.window().show(); }
fn hide(&mut self) { self.ctx.window().hide(); }
fn get_position(&self) -> Option<Position> {
self.ctx.window().get_position().map(|pos|
Position { x: pos.x as i32, y: pos.y as i32 })
}
fn set_position<P: Into<Position>>(&mut self, pos: P) {
let pos: Position = pos.into();
self.ctx.window().set_position((pos.x, pos.y).into());
}
fn set_size<S: Into<Size>>(&mut self, size: S) {
let size: Size = size.into();
self.ctx.window().set_inner_size((
size.width as f64,
size.height as f64,
).into());
}
}
impl OpenGLWindow for GlutinWindow {
    fn get_proc_address(&mut self, proc_name: &str) -> ProcAddress {
        self.ctx.get_proc_address(proc_name) as *const _
    }
    fn is_current(&self) -> bool {
        self.ctx.is_current()
    }
    fn make_current(&mut self) {
        unsafe {
            // SAFETY: `make_current` consumes the context but we only have
            // `&mut self`. Move it out bitwise with `ptr::read` and write the
            // returned context back immediately on both branches, so
            // `self.ctx` never holds a duplicated or invalid value when
            // control leaves this block and nothing is dropped twice.
            let ctx = std::ptr::read(&self.ctx);
            match ctx.make_current() {
                Ok(ctx) => {
                    std::ptr::write(&mut self.ctx, ctx);
                }
                Err((ctx, e)) => {
                    // The failed call returns the (not-current) context;
                    // restore it before panicking.
                    std::ptr::write(&mut self.ctx, ctx);
                    panic!("Failed to make context current: {:?}", e);
                }
            }
        }
    }
}
/// Maps Glutin's key to Piston's key.
pub fn map_key(keycode: glutin::VirtualKeyCode) -> keyboard::Key {
use input::keyboard::Key;
use glutin::VirtualKeyCode as K;
match keycode {
K::Key0 => Key::D0,
K::Key1 => Key::D1,
K::Key2 => Key::D2,
K::Key3 => Key::D3,
K::Key4 => Key::D4,
K::Key5 => Key::D5,
K::Key6 => Key::D6,
K::Key7 => Key::D7,
K::Key8 => Key::D8,
K::Key9 => Key::D9,
K::A => Key::A,
K::B => Key::B,
K::C => Key::C,
K::D => Key::D,
K::E => Key::E,
K::F => Key::F,
K::G => Key::G,
K::H => Key::H,
K::I => Key::I,
K::J => Key::J,
K::K => Key::K,
K::L => Key::L,
K::M => Key::M,
K::N => Key::N,
K::O => Key::O,
K::P => Key::P,
K::Q => Key::Q,
K::R => Key::R,
K::S => Key::S,
K::T => Key::T,
K::U => Key::U,
K::V => Key::V,
K::W => Key::W,
K::X => Key::X,
K::Y => Key::Y,
K::Z => Key::Z,
K::Apostrophe => Key::Unknown,
K::Backslash => Key::Backslash,
K::Back => Key::Backspace,
// K::CapsLock => Key::CapsLock,
K::Delete => Key::Delete,
K::Comma => Key::Comma,
K::Down => Key::Down,
K::End => Key::End,
K::Return => Key::Return,
K::Equals => Key::Equals,
K::Escape => Key::Escape,
K::F1 => Key::F1,
K::F2 => Key::F2,
K::F3 => Key::F3,
K::F4 => Key::F4,
K::F5 => Key::F5,
K::F6 => Key::F6,
K::F7 => Key::F7,
K::F8 => Key::F8,
K::F9 => Key::F9,
K::F10 => Key::F10,
K::F11 => Key::F11,
K::F12 => Key::F12,
K::F13 => Key::F13,
K::F14 => Key::F14,
K::F15 => Key::F15,
K::F16 => Key::F16,
K::F17 => Key::F17,
K::F18 => Key::F18,
K::F19 => Key::F19,
K::F20 => Key::F20,
K::F21 => Key::F21,
K::F22 => Key::F22,
K::F23 => Key::F23,
K::F24 => Key::F24,
// Possibly next code.
// K::F25 => Key::Unknown,
K::Numpad0 => Key::NumPad0,
K::Numpad1 => Key::NumPad1,
K::Numpad2 => Key::NumPad2,
K::Numpad3 => Key::NumPad3,
K::Numpad4 => Key::NumPad4,
K::Numpad5 => Key::NumPad5,
K::Numpad6 => Key::NumPad6,
K::Numpad7 => Key::NumPad7,
K::Numpad8 => Key::NumPad8,
K::Numpad9 => Key::NumPad9,
K::NumpadComma => Key::NumPadDecimal,
K::Divide => Key::NumPadDivide,
K::Multiply => Key::NumPadMultiply,
K::Subtract => Key::NumPadMinus,
K::Add => Key::NumPadPlus,
K::NumpadEnter => Key::NumPadEnter,
K::NumpadEquals => Key::NumPadEquals,
K::LShift => Key::LShift,
K::LControl => Key::LCtrl,
K::LAlt => Key::LAlt,
K::RShift => Key::RShift,
K::RControl => Key::RCtrl,
K::RAlt => Key::RAlt,
// Map to backslash?
// K::GraveAccent => Key::Unknown,
K::Home => Key::Home,
K::Insert => Key::Insert,
K::Left => Key::Left,
K::LBracket => Key::LeftBracket,
// K::Menu => Key::Menu,
K::Minus => Key::Minus,
K::Numlock => Key::NumLockClear,
K::PageDown => Key::PageDown,
K::PageUp => Key::PageUp,
K::Pause => Key::Pause,
K::Period => Key::Period,
K::Snapshot => Key::PrintScreen,
K::Right => Key::Right,
K::RBracket => Key::RightBracket,
K::Scroll => Key::ScrollLock,
K::Semicolon => Key::Semicolon,
K::Slash => Key::Slash,
K::Space => Key::Space,
K::Tab => Key::Tab,
K::Up => Key::Up,
// K::World1 => Key::Unknown,
// K::World2 => Key::Unknown,
_ => Key::Unknown,
}
}
/// Maps Glutin's mouse button to Piston's mouse button.
pub fn map_mouse(mouse_button: glutin::MouseButton) -> MouseButton {
use glutin::MouseButton as M;
match mouse_button {
M::Left => MouseButton::Left,
M::Right => MouseButton::Right,
M::Middle => MouseButton::Middle,
M::Other(0) => MouseButton::X1,
M::Other(1) => MouseButton::X2,
M::Other(2) => MouseButton::Button6,
M::Other(3) => MouseButton::Button7,
M::Other(4) => MouseButton::Button8,
_ => MouseButton::Unknown
}
}
|
pub mod xml_export {
    //! Exports quiz questions in Moodle's XML import format.
    use std::fs::File;
    use std::io::prelude::*;

    /// The kind of quiz question; determines the Moodle question type id.
    pub enum QuestionType {
        SingleChoice,
        MultipleChoice,
        Essay,
        TrueFalse,
        Matching,
        ShortAnswer,
        NumericalResponse,
        UndefinedQuestion,
    }

    /// A single answer option of a question.
    pub struct Answer {
        pub title: String,
        pub points: i32,
    }

    /// A quiz question with its answer options.
    pub struct Question {
        pub question_type: QuestionType,
        pub title: String,
        pub answers: Vec<Answer>,
    }

    /// Serializes `questions` as Moodle XML and writes the result to `test.xml`.
    pub fn questions_to_moodle(questions: &mut Vec<Question>) {
        let mut xml_string = String::new();
        xml_string += "<?xml version=\"1.0\"?>";
        xml_string += "<quiz>";
        for question in questions {
            let question_type_id = determine_type(&question.question_type);
            let new_question = format!("<question type=\"{}\">", question_type_id);
            xml_string += &new_question;
            // Moodle's importer requires a <name> element on every question;
            // without it the whole file is rejected on import.
            xml_string += "<name><text>";
            xml_string += &question.title;
            xml_string += "</text></name>";
            xml_string += "<questiontext format=\"html\">";
            xml_string += "<text>";
            xml_string += &question.title;
            xml_string += "</text></questiontext>";
            for answer in &question.answers {
                xml_string += "<answer fraction=\"0\">";
                xml_string += "<text>";
                let answer_text = &answer.title;
                xml_string += &answer_text;
                xml_string += "</text>";
                xml_string += "</answer>";
            }
            xml_string += "</question>";
        }
        xml_string += "</quiz>";
        print_to_file(&xml_string);
    }

    /// Writes `xml_string` to `test.xml`, panicking on I/O failure.
    fn print_to_file(xml_string: &String) {
        let mut file = match File::create("test.xml") {
            // `Error::description` is deprecated and often lossy;
            // the `Display` impl carries the full OS error message.
            Err(why) => panic!("Ging net: {}", why),
            Ok(file) => file,
        };
        match file.write_all(xml_string.as_bytes()) {
            Err(why) => {
                panic!("Ging auch net: {}", why);
            }
            Ok(_) => println!("Gesaved!"),
        }
    }

    /// Maps a `QuestionType` to Moodle's question type identifier.
    fn determine_type(question_type: &QuestionType) -> String {
        match question_type {
            QuestionType::SingleChoice => "multichoice".to_string(),
            QuestionType::MultipleChoice => "multichoice".to_string(),
            QuestionType::Essay => "essay".to_string(),
            QuestionType::TrueFalse => "truefalse".to_string(),
            QuestionType::Matching => "match".to_string(),
            QuestionType::ShortAnswer => "shortanswer".to_string(),
            QuestionType::NumericalResponse => "numerical".to_string(),
            QuestionType::UndefinedQuestion => "description".to_string(),
        }
    }
}
Bug fixed: exported questions are now accepted by the Moodle import.
pub mod xml_export {
    //! Serialization of quiz questions into Moodle's XML import format.
    use std::fs::File;
    use std::io::prelude::*;
    use std::error::Error;
    /// Supported question categories, mirroring Moodle's question types.
    pub enum QuestionType {
        SingleChoice,
        MultipleChoice,
        Essay,
        TrueFalse,
        Matching,
        ShortAnswer,
        NumericalResponse,
        UndefinedQuestion,
    }
    /// A single answer option belonging to a question.
    pub struct Answer {
        pub title: String,
        pub points: i32,
    }
    /// A question with its type, title and answer options.
    pub struct Question {
        pub question_type: QuestionType,
        pub title: String,
        pub answers: Vec<Answer>,
    }
    /// Builds the Moodle XML document for `questions` and saves it
    /// to `test.xml`.
    pub fn questions_to_moodle(questions: &mut Vec<Question>) {
        let mut doc = String::from("<?xml version=\"1.0\"?>");
        doc.push_str("<quiz>");
        for q in questions.iter() {
            doc.push_str(&format!("<question type=\"{}\">", determine_type(&q.question_type)));
            // The <name> element is mandatory for Moodle's importer.
            doc.push_str(&format!("<name><text>{}</text></name>", q.title));
            doc.push_str(&format!("<questiontext format=\"html\"><text>{}</text></questiontext>", q.title));
            for a in q.answers.iter() {
                doc.push_str(&format!("<answer fraction=\"0\"><text>{}</text></answer>", a.title));
            }
            doc.push_str("</question>");
        }
        doc.push_str("</quiz>");
        print_to_file(&doc);
    }
    /// Persists the document to `test.xml`; panics if the file cannot
    /// be created or written.
    fn print_to_file(xml_string: &String) {
        let mut file = match File::create("test.xml") {
            Err(why) => panic!("Ging net: {}", why.description()),
            Ok(file) => file,
        };
        match file.write_all(xml_string.as_bytes()) {
            Err(why) => panic!("Ging auch net: {}", why.description()),
            Ok(_) => println!("Gesaved!"),
        }
    }
    /// Translates a question type into Moodle's type identifier.
    fn determine_type(question_type: &QuestionType) -> String {
        let id = match *question_type {
            QuestionType::SingleChoice | QuestionType::MultipleChoice => "multichoice",
            QuestionType::Essay => "essay",
            QuestionType::TrueFalse => "truefalse",
            QuestionType::Matching => "match",
            QuestionType::ShortAnswer => "shortanswer",
            QuestionType::NumericalResponse => "numerical",
            QuestionType::UndefinedQuestion => "description",
        };
        id.to_string()
    }
}
|
extern crate nalgebra;
use nalgebra::DMat;
/// Smith-Waterman local alignment over two input sequences.
pub struct SmithWaterman{
    // First input sequence.
    sequence1: String,
    // Second input sequence.
    sequence2: String
}
#[test]
fn it_works() {
    // Placeholder test: only verifies that the crate compiles.
}
Create matrix with loops
Added a debug display
#![feature(str_char)]
extern crate nalgebra;
use nalgebra::DMat;
use std::fmt::{Debug, Formatter, Result};
/// Smith-Waterman local alignment state: the two sequences, the
/// dynamic-programming scoring matrix, and the scoring parameters.
pub struct SmithWaterman{
    // Sequence indexing the matrix rows (see `new`: nrows = len + 1).
    sequence1: String,
    // Sequence indexing the matrix columns (ncols = len + 1).
    sequence2: String,
    // (len1+1) x (len2+1) score matrix; row/column 0 stay zero.
    matrix: DMat<isize>,
    // Score added on a character match (2 by default, see `new`).
    matched: isize,
    // Score added on a mismatch or gap (-1 by default, see `new`).
    missed: isize,
}
impl SmithWaterman{
pub fn new(sequence1: String, sequence2: String) -> SmithWaterman {
SmithWaterman{matrix: nalgebra::DMat::new_zeros(sequence1.len()+1, sequence2.len()+1),
sequence1: sequence1, sequence2: sequence2, matched: 2, missed: -1}
}
fn penalty(&self, value: isize, penalty_value: isize) -> isize{
match value.checked_add(penalty_value){
Some(i) =>{
if i<0 { 0 }else { i }
},
_ => {0}
}
}
pub fn score(&mut self){
let mut max_point = (0,0);
let mut max = 0;
for row in (1..self.sequence1.len()+1){
for col in (1..self.sequence2.len()+1){
let left = self.penalty(self.matrix[(row, col-1)], self.missed);
let top = self.penalty(self.matrix[(row-1, col)], self.missed);
let diagonal = self.matrix[(row-1, col-1)];
let diagonal_match = if self.sequence2.char_at(col-1) == self.sequence1.char_at(row-1){
self.penalty(diagonal, self.matched)
}else{
self.penalty(diagonal, self.missed)
};
let n = std::cmp::max(left, std::cmp::max(top, diagonal_match));
if n>=max{
max =n;
max_point = (row,col);
}
self.matrix[(row, col)] = n;
}
}
}
}
impl Debug for SmithWaterman {
    /// Renders the scoring matrix as a grid with sequence1's characters
    /// labelling the rows and sequence2's characters labelling the columns.
    fn fmt(&self, form:&mut Formatter) -> Result {
        //nrows already has an extra over the sequence counts for the row of zeros
        // The printed grid is one row/column larger than the matrix to make
        // room for the labels, so three coordinate systems are in play:
        // grid (row, col), matrix (row-1, col-1), sequence (row-2 / col-2).
        for row in 0..self.matrix.nrows()+1 {
            for col in 0..self.matrix.ncols()+1 {
                let _ = if col==0 && row>1{
                    // Left edge: row label from sequence1.
                    write!(form, "{:>5}", self.sequence1.char_at(row-2).to_string())
                } else if row==0 && col>1{
                    // Top edge: column label from sequence2.
                    write!(form, "{:>5}", self.sequence2.char_at(col-2).to_string())
                } else if row>=1 && col>=1{
                    // Interior: the matrix cell, shifted by the label row/col.
                    write!(form, "{:>5}", self.matrix[(row-1,col-1)])
                }else{
                    // Corner cells that have neither a label nor a score.
                    write!(form, "{:>5}", "-")
                };
            }
            let _ = write!(form, "\n");
        }
        write!(form, "\n")
    }
}
#[test]
fn its_debugging() {
    // Score both orderings of the same pair so the debug output is
    // exercised with asymmetric matrix dimensions.
    let pairs = [("atgcatgcatgc", "atgggcatg"), ("atgggcatg", "atgcatgcatgc")];
    for &(a, b) in pairs.iter() {
        let mut aligner = SmithWaterman::new(a.to_string(), b.to_string());
        aligner.score();
        println!("{:?}", aligner);
    }
}
|
//! Promises in Rust.
//! See the `Promise` struct for more details.
#![warn(missing_docs)]
use std::thread;
use std::sync::mpsc::channel;
use std::time::Duration;
use std::thread::JoinHandle;
use std::marker::{Send};
use std::sync::mpsc::{Sender, Receiver, TryRecvError};
/// A promise is a way of doing work in the background. The promises in
/// this library have the same featureset as those in Ecmascript 5.
///
/// # Promises
/// Promises (sometimes known as "futures") are objects that represent
/// asynchronous tasks being run in the background, or results which
/// will exist in the future.
/// A promise will be in state of running, fulfilled, or done. In order to
/// use the results of a fulfilled promise, one attaches another promise
/// to it (i.e. via `then`). Like their Javascript counterparts, promises can
/// return an error (of type `E`).
///
/// # Panics
/// If the function being executed by a promise panics, it does so silently.
/// The panic will not resurface in the thread which created the promise,
/// and promises waiting on its result will never be called. In addition,
/// the `all` and `race` proimse methods will _ignore_ "dead" promises. They
/// will remove promises from their lists, and if there aren't any left
/// they will silently exit without doing anything.
///
/// Unfortunately, panics must be ignored for two reasons:
/// * Panic messages don't have a concrete type yet in Rust. If they did,
/// promiess would be able to inspect their predecessors' errors.
/// * Although a `Receiver` can correctly handle its paired `Sender` being
/// dropped, such as during a panic, for reasons stated above the "message"
/// of the panic is not relayed.
///
/// Finally, Ecmascript promises themselves do have the ability to return
/// and error type, represented as a `Result<T, E>` here. Thus, one should
/// use `try!` and other error handling rather than calls to `unwrap()`.
pub struct Promise<T: Send, E: Send> {
    // The background worker sends exactly one Result here.
    receiver: Receiver<Result<T, E>>
}
impl<T: Send + 'static, E: Send + 'static> Promise<T, E> {
    /// Chains a function to be called after this promise resolves.
    /// `callback` receives the success value, `errback` the error.
    pub fn then<T2, E2>(self, callback: fn(f: T) -> Result<T2, E2>,
                        errback: fn(e: E) -> Result<T2, E2>)
                        -> Promise<T2, E2>
        where T2: Send + 'static, E2: Send + 'static {
        let recv = self.receiver;
        let (tx, rx) = channel();
        // `recv` is moved into the worker thread; the old stray
        // `recv.recv()` after this point was a use-after-move.
        thread::spawn(move || {
            Promise::promise_then(tx, recv, callback, errback);
        });
        Promise { receiver: rx }
    }
    /// Chains a function to be called after this promise resolves,
    /// using a `Result` type.
    pub fn then_result<T2, E2>(self,
                               callback: fn(Result<T, E>) -> Result<T2, E2>)
                               -> Promise<T2, E2>
        where T2: Send + 'static, E2: Send + 'static {
        let recv = self.receiver;
        let (tx, rx) = channel();
        thread::spawn(move || {
            Promise::promise_then_result(tx, recv, callback);
        });
        Promise { receiver: rx }
    }
    /// Creates a new promise that runs `func` on a background thread
    /// and resolves to its `Result`.
    pub fn new<F>(func: F) -> Promise<T, E>
        where F: Send + 'static + FnOnce() -> Result<T, E> {
        let (tx, rx) = channel();
        thread::spawn(move || {
            Promise::promise_new(tx, func);
        });
        Promise { receiver: rx }
    }
    /// Applies a promise to the first of some promises to become fulfilled.
    pub fn race<T2, E2>(promises: Vec<Promise<T, E>>,
                        func: fn(res: Result<T, E>) -> Result<T2, E2>)
                        -> Promise<T2, E2>
        where T2: Send + 'static, E2: Send + 'static {
        let recs = promises.into_iter().map(|p| p.receiver).collect();
        let (tx, rx) = channel::<Result<T2, E2>>();
        thread::spawn(move || {
            Promise::promise_race(tx, recs, func);
        });
        Promise { receiver: rx }
    }
    /// Calls a function with the result of all of the promises, or the error
    /// of the first promise to error.
    ///
    /// NOTE(review): `func` is part of the public signature but is not
    /// currently applied to the collected result — confirm intent.
    pub fn all<T2, E2>(promises: Vec<Promise<T, E>>,
                       func: fn(res: Result<Vec<T>, E>) -> Result<T2, E2>)
                       -> Promise<Vec<T>, E>
        where T2: Send + 'static, E2: Send + 'static {
        let _ = func;
        let receivers: Vec<Receiver<Result<T, E>>> =
            promises.into_iter().map(|p| p.receiver).collect();
        let (tx, rx) = channel();
        thread::spawn(move || {
            Promise::promise_all(receivers, tx);
        });
        Promise { receiver: rx }
    }
    /// Creates a promise that resolves to a value
    pub fn resolve(val: T) -> Promise<T, E> {
        Promise::from_result(Ok(val))
    }
    /// Creates a promise that resolves to an error.
    pub fn reject(val: E) -> Promise<T, E> {
        Promise::from_result(Err(val))
    }
    /// Creates a new promise that will resolve to the result value.
    pub fn from_result(result: Result<T, E>) -> Promise<T, E> {
        let (tx, rx) = channel();
        // The channel is asynchronous, so the send completes immediately.
        tx.send(result).unwrap();
        Promise { receiver: rx }
    }
    // Implementation Functions
    /// Runs `func` and forwards its result; send errors (receiver
    /// already dropped) are deliberately ignored.
    fn promise_new<F>(tx: Sender<Result<T, E>>, func: F)
        where F: FnOnce() -> Result<T, E> {
        let result = func();
        tx.send(result).unwrap_or(());
    }
    /// Waits for the upstream result and dispatches it to `callback`
    /// or `errback`. A dropped upstream sender (panicked promise)
    /// silently ends the chain.
    fn promise_then<T2, E2>(tx: Sender<Result<T2, E2>>,
                            rx: Receiver<Result<T, E>>,
                            callback: fn(T) -> Result<T2, E2>,
                            errback: fn(E) -> Result<T2, E2>)
        where T2: Send + 'static, E2: Send + 'static {
        if let Ok(message) = rx.recv() {
            match message {
                Ok(val) => tx.send(callback(val)).unwrap_or(()),
                Err(err) => tx.send(errback(err)).unwrap_or(())
            };
        }
    }
    /// Like `promise_then` but hands the whole `Result` to one callback.
    fn promise_then_result<T2, E2>(tx: Sender<Result<T2, E2>>,
                                   rx: Receiver<Result<T, E>>,
                                   callback: fn(Result<T, E>) -> Result<T2, E2>)
        where T2: Send + 'static, E2: Send + 'static {
        if let Ok(result) = rx.recv() {
            tx.send(callback(result)).unwrap_or(());
        }
    }
    // Static methods
    /// Polls all receivers until one yields a result, applies `func`,
    /// and forwards it. Dead (disconnected) promises are dropped; if
    /// every promise dies the race ends silently.
    fn promise_race<T2, E2>(tx: Sender<Result<T2, E2>>,
                            mut recs: Vec<Receiver<Result<T, E>>>,
                            func: fn(Result<T, E>) -> Result<T2, E2>)
        where T2: Send + 'static, E2: Send + 'static {
        loop {
            // Without this guard an empty list would spin forever.
            if recs.is_empty() { return; }
            let mut i = 0;
            while i < recs.len() {
                match recs[i].try_recv() {
                    Ok(val) => {
                        tx.send(func(val)).unwrap_or(());
                        return;
                    }
                    Err(TryRecvError::Disconnected) => {
                        // Drop dead promises so we stop polling them.
                        recs.remove(i);
                    }
                    Err(TryRecvError::Empty) => { i += 1; }
                }
            }
        }
    }
    /// Collects every promise's value (first error wins) and sends the
    /// aggregate. Promises whose sender was dropped are ignored, as
    /// documented on the type.
    fn promise_all(receivers: Vec<Receiver<Result<T, E>>>,
                   tx: Sender<Result<Vec<T>, E>>) {
        let mut values: Vec<T> = Vec::with_capacity(receivers.len());
        let mut pending = receivers;
        loop {
            let mut i = 0;
            while i < pending.len() {
                match pending[i].try_recv() {
                    Ok(Ok(t)) => {
                        values.push(t);
                        pending.remove(i);
                    }
                    Ok(Err(e)) => {
                        // First error short-circuits the whole batch.
                        tx.send(Err(e)).unwrap_or(());
                        return;
                    }
                    Err(TryRecvError::Disconnected) => {
                        // The promise panicked; skip it.
                        pending.remove(i);
                    }
                    Err(TryRecvError::Empty) => { i += 1; }
                }
            }
            // Done (or every promise died): deliver what we collected.
            if pending.is_empty() {
                tx.send(Ok(values)).unwrap_or(());
                return;
            }
        }
    }
}
Rename internal functions to impl_*
//! Promises in Rust.
//! See the `Promise` struct for more details.
#![warn(missing_docs)]
use std::thread;
use std::sync::mpsc::channel;
use std::time::Duration;
use std::thread::JoinHandle;
use std::marker::{Send};
use std::sync::mpsc::{Sender, Receiver, TryRecvError};
/// A promise is a way of doing work in the background. The promises in
/// this library have the same featureset as those in Ecmascript 5.
///
/// # Promises
/// Promises (sometimes known as "futures") are objects that represent
/// asynchronous tasks being run in the background, or results which
/// will exist in the future.
/// A promise will be in state of running, fulfilled, or done. In order to
/// use the results of a fulfilled promise, one attaches another promise
/// to it (i.e. via `then`). Like their Javascript counterparts, promises can
/// return an error (of type `E`).
///
/// # Panics
/// If the function being executed by a promise panics, it does so silently.
/// The panic will not resurface in the thread which created the promise,
/// and promises waiting on its result will never be called. In addition,
/// the `all` and `race` proimse methods will _ignore_ "dead" promises. They
/// will remove promises from their lists, and if there aren't any left
/// they will silently exit without doing anything.
///
/// Unfortunately, panics must be ignored for two reasons:
/// * Panic messages don't have a concrete type yet in Rust. If they did,
/// promiess would be able to inspect their predecessors' errors.
/// * Although a `Receiver` can correctly handle its paired `Sender` being
/// dropped, such as during a panic, for reasons stated above the "message"
/// of the panic is not relayed.
///
/// Finally, Ecmascript promises themselves do have the ability to return
/// and error type, represented as a `Result<T, E>` here. Thus, one should
/// use `try!` and other error handling rather than calls to `unwrap()`.
pub struct Promise<T: Send, E: Send> {
    // The background worker sends exactly one Result here.
    receiver: Receiver<Result<T, E>>
}
impl<T: Send + 'static, E: Send + 'static> Promise<T, E> {
    /// Chains a function to be called after this promise resolves.
    pub fn then<T2, E2>(self, callback: fn(t: T) -> Result<T2, E2>,
                        errback: fn(e: E) -> Result<T2, E2>)
                        -> Promise<T2, E2>
        where T2: Send + 'static, E2: Send + 'static {
        let recv = self.receiver;
        let (tx, rx) = channel();
        thread::spawn(move || {
            Promise::impl_then(tx, recv, callback, errback);
        });
        Promise { receiver: rx }
    }
    /// Chains a function to be called after this promise resolves,
    /// using a `Result` type.
    pub fn then_result<T2, E2>(self,
                               callback: fn(r: Result<T, E>) -> Result<T2, E2>)
                               -> Promise<T2, E2>
        where T2: Send + 'static, E2: Send + 'static {
        let recv = self.receiver;
        let (tx, rx) = channel();
        thread::spawn(move || {
            Promise::impl_then_result(tx, recv, callback);
        });
        Promise { receiver: rx }
    }
    /// Creates a new promise, which will eventually resolve to one of the
    /// values of the `Result<T, E>` type.
    pub fn new<F>(func: F) -> Promise<T, E>
        where F: Send + 'static + FnOnce() -> Result<T, E> {
        let (tx, rx) = channel();
        thread::spawn(move || {
            Promise::impl_new(tx, func);
        });
        Promise { receiver: rx }
    }
    /// Applies a promise to the first of some promises to become fulfilled.
    pub fn race(promises: Vec<Promise<T, E>>) -> Promise<T, E> {
        let recs = promises.into_iter().map(|p| p.receiver).collect();
        let (tx, rx) = channel();
        thread::spawn(move || {
            Promise::impl_race(tx, recs);
        });
        Promise { receiver: rx }
    }
    /// Calls a function with the result of all of the promises, or the error
    /// of the first promise to error.
    pub fn all(promises: Vec<Promise<T, E>>) -> Promise<Vec<T>, E> {
        let receivers: Vec<Receiver<Result<T, E>>> =
            promises.into_iter().map(|p| p.receiver).collect();
        let (tx, rx) = channel();
        thread::spawn(move || {
            Promise::impl_all(tx, receivers);
        });
        Promise { receiver: rx }
    }
    /// Creates a promise that resolves to a value
    pub fn resolve(val: T) -> Promise<T, E> {
        Promise::from_result(Ok(val))
    }
    /// Creates a promise that resolves to an error.
    pub fn reject(val: E) -> Promise<T, E> {
        Promise::from_result(Err(val))
    }
    /// Creates a new promise that will resolve to the result value.
    pub fn from_result(result: Result<T, E>) -> Promise<T, E> {
        let (tx, rx) = channel();
        // The channel is asynchronous, so the send completes immediately.
        tx.send(result).unwrap();
        Promise { receiver: rx }
    }
    // Implementation Functions
    /// Runs `func` and forwards its result; send errors (receiver
    /// already dropped) are deliberately ignored.
    fn impl_new<F>(tx: Sender<Result<T, E>>, func: F)
        where F: FnOnce() -> Result<T, E> {
        let result = func();
        tx.send(result).unwrap_or(());
    }
    /// Waits for the upstream result and dispatches it to `callback`
    /// or `errback`. A dropped upstream sender (panicked promise)
    /// silently ends the chain.
    fn impl_then<T2, E2>(tx: Sender<Result<T2, E2>>,
                         rx: Receiver<Result<T, E>>,
                         callback: fn(T) -> Result<T2, E2>,
                         errback: fn(E) -> Result<T2, E2>)
        where T2: Send + 'static, E2: Send + 'static {
        if let Ok(message) = rx.recv() {
            match message {
                Ok(val) => tx.send(callback(val)).unwrap_or(()),
                Err(err) => tx.send(errback(err)).unwrap_or(())
            };
        }
    }
    /// Like `impl_then` but hands the whole `Result` to one callback.
    fn impl_then_result<T2, E2>(tx: Sender<Result<T2, E2>>,
                                rx: Receiver<Result<T, E>>,
                                callback: fn(Result<T, E>) -> Result<T2, E2>)
        where T2: Send + 'static, E2: Send + 'static {
        if let Ok(result) = rx.recv() {
            tx.send(callback(result)).unwrap_or(());
        }
    }
    // Static methods
    /// Forwards the first available result. Disconnected receivers
    /// (panicked promises) are removed so they are not polled forever.
    fn impl_race(tx: Sender<Result<T, E>>,
                 mut recs: Vec<Receiver<Result<T, E>>>) {
        loop {
            // Don't get stuck in an infinite loop once every promise died.
            if recs.is_empty() { return; }
            let mut i = 0;
            while i < recs.len() {
                match recs[i].try_recv() {
                    Ok(val) => {
                        tx.send(val).unwrap_or(());
                        return;
                    }
                    Err(TryRecvError::Disconnected) => {
                        recs.remove(i);
                    }
                    Err(TryRecvError::Empty) => { i += 1; }
                }
            }
        }
    }
    /// Collects every promise's value (first error wins) and sends the
    /// aggregate. Dead promises are skipped, as documented on the type.
    fn impl_all(tx: Sender<Result<Vec<T>, E>>,
                recs: Vec<Receiver<Result<T, E>>>) {
        let mut values: Vec<T> = Vec::with_capacity(recs.len());
        let mut pending = recs;
        loop {
            let mut i = 0;
            while i < pending.len() {
                match pending[i].try_recv() {
                    Ok(Ok(t)) => {
                        values.push(t);
                        pending.remove(i);
                    }
                    Ok(Err(e)) => {
                        // First error short-circuits the whole batch.
                        tx.send(Err(e)).unwrap_or(());
                        return;
                    }
                    Err(TryRecvError::Disconnected) => {
                        pending.remove(i);
                    }
                    Err(TryRecvError::Empty) => { i += 1; }
                }
            }
            // Check if we are finished waiting for promises.
            // This can also happen if all promises panic.
            if pending.is_empty() {
                tx.send(Ok(values)).unwrap_or(());
                return;
            }
        }
    }
}
|
#![feature(catch_panic, fnbox)]
#[macro_use]
extern crate lazy_static;
mod util;
pub mod reporter;
mod world_state;
pub mod example_group;
mod example_group_and_block;
use std::string::ToString;
use std::thread::JoinHandle;
use std::sync::{Arc, Mutex};
use util::{await_handles, any_is_err};
use example_group_and_block::ExampleGroupAndBlock;
/// Registry of all described example groups plus the shared run state.
#[derive(Debug)]
struct World {
    // Shared with each example-group thread so failures can be recorded.
    state: Arc<Mutex<world_state::WorldState>>,
    // Groups registered via `describe`, each paired with its definition block.
    example_groups: Vec<ExampleGroupAndBlock>,
}
/// Runs `blk` with exclusive, mutable access to the global `WORLD`.
fn with_world<F, T>(blk: F) -> T where F: FnOnce(&mut World) -> T {
    let world = WORLD.clone();
    let mut locked = world.lock().unwrap();
    blk(&mut locked)
}
/// Takes ownership of the global `WORLD` (leaving a fresh one behind)
/// and passes it by value to `blk`.
fn consuming_world<F, T>(blk: F) -> T where F: FnOnce(World) -> T {
    let handle = WORLD.clone();
    let mut locked = handle.lock().unwrap();
    let mut taken = World::new();
    std::mem::swap(&mut taken, &mut locked);
    blk(taken)
}
impl World {
    /// Creates an empty world with a clean (not-failed) shared state.
    fn new() -> World {
        World {
            state: Arc::new(Mutex::new(world_state::WorldState {
                failed: false,
                reporter: reporter::Reporter,
            })),
            example_groups: Vec::new(),
        }
    }
    /// Registers an example group together with the block that will
    /// define its examples when the group is run.
    fn describe<F>(&mut self, description: &str, example_group_definition_block: F) where F: Fn(&mut example_group::ExampleGroup) + Send + 'static {
        self.example_groups.push(
            ExampleGroupAndBlock {
                group: example_group::ExampleGroup {
                    description: description.to_string(),
                    examples: Vec::new(),
                },
                block: Box::new(example_group_definition_block)
            }
        );
    }
    /// Runs every example group (each on its own thread), records
    /// whether any failed, and returns a snapshot of the final state.
    fn run(self) -> world_state::WorldState {
        let join_handles: Vec<_> = World::create_example_group_join_handles(self.state.clone(), self.example_groups);
        let results = await_handles(join_handles);
        // The run fails if any group's thread produced an Err result.
        let failed = any_is_err(results);
        let state_guard = self.state.clone();
        let mut state = state_guard.lock().unwrap();
        state.failed = failed;
        // Return a detached copy so callers don't need the Arc/Mutex.
        world_state::WorldState {
            failed: state.failed,
            reporter: reporter::Reporter
        }
    }
    /// Spawns one thread per example group; each join handle yields
    /// Ok on success or Err on failure.
    fn create_example_group_join_handles(state: Arc<Mutex<world_state::WorldState>>, example_groups: Vec<ExampleGroupAndBlock>) -> Vec<JoinHandle<Result<(), ()>>> {
        example_groups.into_iter().map(|egab| {
            egab.spawn(&state)
        }).collect()
    }
}
lazy_static! {
    // Process-wide registry of example groups, shared across threads.
    static ref WORLD: Arc<Mutex<World>> = Arc::new(Mutex::new(World::new()));
}
/// Registers a new top-level example group with the global world.
pub fn describe<F>(description: &str, example_group_definition_block: F) where F: Fn(&mut example_group::ExampleGroup) + Send + 'static {
    with_world(|w| {
        w.describe(description, example_group_definition_block);
    });
}
/// Runs every registered example group and prints whether any failed.
pub fn descriptor_main() {
    let final_state = consuming_world(|world| world.run());
    println!("{}", final_state.failed);
}
Fixups
#![feature(catch_panic, fnbox)]
#[macro_use]
extern crate lazy_static;
mod util;
pub mod reporter;
mod world_state;
pub mod example_group;
mod example_group_and_block;
use std::string::ToString;
use std::thread::JoinHandle;
use std::sync::{Arc, Mutex};
use util::{await_handles, any_is_err};
use example_group_and_block::ExampleGroupAndBlock;
/// Holds every described example group and the state shared by a run.
#[derive(Debug)]
struct World {
    // Cloned into each example-group thread to record failures.
    state: Arc<Mutex<world_state::WorldState>>,
    // Example groups registered through `describe`.
    example_groups: Vec<ExampleGroupAndBlock>,
}
impl World {
    /// Creates an empty world whose shared state starts out not-failed.
    fn new() -> World {
        World {
            state: Arc::new(Mutex::new(world_state::WorldState {
                failed: false,
                reporter: reporter::Reporter,
            })),
            example_groups: Vec::new(),
        }
    }
    /// Stores an example group and the block that defines its examples.
    fn describe<F>(&mut self, description: &str, example_group_definition_block: F) where F: Fn(&mut example_group::ExampleGroup) + Send + 'static {
        self.example_groups.push(
            ExampleGroupAndBlock {
                group: example_group::ExampleGroup {
                    description: description.to_string(),
                    examples: Vec::new(),
                },
                block: Box::new(example_group_definition_block)
            }
        );
    }
    /// Executes all example groups on worker threads, flags the shared
    /// state as failed if any group failed, and returns a snapshot.
    fn run(self) -> world_state::WorldState {
        let join_handles: Vec<_> = World::create_example_group_join_handles(self.state.clone(), self.example_groups);
        let results = await_handles(join_handles);
        // Any Err result from a group marks the whole run failed.
        let failed = any_is_err(results);
        let state_guard = self.state.clone();
        let mut state = state_guard.lock().unwrap();
        state.failed = failed;
        // Hand back a plain value; callers never see the Arc/Mutex.
        world_state::WorldState {
            failed: state.failed,
            reporter: reporter::Reporter
        }
    }
    /// Spawns one thread per group; each handle resolves to Ok/Err.
    fn create_example_group_join_handles(state: Arc<Mutex<world_state::WorldState>>, example_groups: Vec<ExampleGroupAndBlock>) -> Vec<JoinHandle<Result<(), ()>>> {
        example_groups.into_iter().map(|egab| {
            egab.spawn(&state)
        }).collect()
    }
}
lazy_static! {
    // Global, thread-shared registry backing `describe`/`descriptor_main`.
    static ref WORLD: Arc<Mutex<World>> = Arc::new(Mutex::new(World::new()));
}
/// Grants `blk` mutable access to the global `WORLD` under its lock.
fn with_world<F, T>(blk: F) -> T where F: FnOnce(&mut World) -> T {
    let shared = WORLD.clone();
    let mut world = shared.lock().unwrap();
    blk(&mut world)
}
/// Swaps a brand-new `World` into the global slot and hands the old
/// one to `blk` by value.
fn consuming_world<F, T>(blk: F) -> T where F: FnOnce(World) -> T {
    let shared = WORLD.clone();
    let mut slot = shared.lock().unwrap();
    let mut extracted = World::new();
    std::mem::swap(&mut extracted, &mut slot);
    blk(extracted)
}
/// Public entry point: registers an example group on the global world.
pub fn describe<F>(description: &str, example_group_definition_block: F) where F: Fn(&mut example_group::ExampleGroup) + Send + 'static {
    with_world(|world| world.describe(description, example_group_definition_block));
}
/// Consumes the global world, runs all registered example groups, and
/// prints the failure flag.
pub fn descriptor_main() {
    println!("{}", consuming_world(|world| world.run()).failed);
}
|
//! Rust-Postgres is a pure-Rust frontend for the popular PostgreSQL database. It
//! exposes a high level interface in the vein of JDBC or Go's `database/sql`
//! package.
//!
//! ```rust,no_run
//! extern crate postgres;
//! extern crate time;
//!
//! use time::Timespec;
//!
//! use postgres::{PostgresConnection, NoSsl};
//!
//! struct Person {
//! id: i32,
//! name: String,
//! time_created: Timespec,
//! data: Option<Vec<u8>>
//! }
//!
//! fn main() {
//! let conn = PostgresConnection::connect("postgresql://postgres@localhost",
//! &NoSsl).unwrap();
//!
//! conn.execute("CREATE TABLE person (
//! id SERIAL PRIMARY KEY,
//! name VARCHAR NOT NULL,
//! time_created TIMESTAMP NOT NULL,
//! data BYTEA
//! )", []).unwrap();
//! let me = Person {
//! id: 0,
//! name: "Steven".to_string(),
//! time_created: time::get_time(),
//! data: None
//! };
//! conn.execute("INSERT INTO person (name, time_created, data)
//! VALUES ($1, $2, $3)",
//! [&me.name, &me.time_created, &me.data]).unwrap();
//!
//! let stmt = conn.prepare("SELECT id, name, time_created, data FROM person")
//! .unwrap();
//! for row in stmt.query([]).unwrap() {
//! let person = Person {
//! id: row.get(0u),
//! name: row.get(1u),
//! time_created: row.get(2u),
//! data: row.get(3u)
//! };
//! println!("Found person {}", person.name);
//! }
//! }
//! ```
#![doc(html_root_url="http://www.rust-ci.org/sfackler/rust-postgres/doc")]
#![feature(macro_rules, struct_variant, phase, unsafe_destructor)]
#![warn(missing_doc)]
extern crate collections;
extern crate openssl;
extern crate serialize;
extern crate time;
extern crate phf;
#[phase(plugin)]
extern crate phf_mac;
extern crate url;
#[phase(plugin, link)]
extern crate log;
use collections::{Deque, RingBuf};
use url::{UserInfo, Url};
use openssl::crypto::hash::{MD5, Hasher};
use openssl::ssl::SslContext;
use serialize::hex::ToHex;
use std::cell::{Cell, RefCell};
use std::collections::HashMap;
use std::from_str::FromStr;
use std::io::{BufferedStream, IoResult};
use std::io::net::ip::Port;
use std::mem;
use std::fmt;
use error::{InvalidUrl,
MissingPassword,
MissingUser,
PgConnectDbError,
PgConnectStreamError,
PgConnectBadResponse,
PgDbError,
PgInvalidColumn,
PgStreamDesynchronized,
PgStreamError,
PgWrongParamCount,
PostgresConnectError,
PostgresDbError,
PostgresError,
UnsupportedAuthentication,
PgWrongConnection,
PgWrongTransaction,
PgBadResponse};
use io::{MaybeSslStream, InternalStream};
use message::{AuthenticationCleartextPassword,
AuthenticationGSS,
AuthenticationKerberosV5,
AuthenticationMD5Password,
AuthenticationOk,
AuthenticationSCMCredential,
AuthenticationSSPI,
BackendKeyData,
BackendMessage,
BindComplete,
CommandComplete,
DataRow,
EmptyQueryResponse,
ErrorResponse,
NoData,
NoticeResponse,
NotificationResponse,
ParameterDescription,
ParameterStatus,
ParseComplete,
PortalSuspended,
ReadyForQuery,
RowDescription,
RowDescriptionEntry};
use message::{Bind,
CancelRequest,
Close,
Describe,
Execute,
FrontendMessage,
Parse,
PasswordMessage,
Query,
StartupMessage,
Sync,
Terminate};
use message::{WriteMessage, ReadMessage};
use types::{Oid, PostgresType, ToSql, FromSql, PgUnknownType, Binary};
#[macro_escape]
mod macros;
pub mod error;
mod io;
pub mod pool;
mod message;
pub mod types;
// Sentinel stored in `InnerPostgresConnection::canary`; presumably
// checked elsewhere to detect use of a finished connection — confirm.
static CANARY: u32 = 0xdeadbeef;
/// A typedef of the result returned by many methods.
pub type PostgresResult<T> = Result<T, PostgresError>;
/// Specifies the target server to connect to.
#[deriving(Clone)]
pub enum PostgresConnectTarget {
    /// Connect via TCP to the specified host.
    TargetTcp(String),
    /// Connect via a Unix domain socket in the specified directory.
    TargetUnix(Path)
}
/// Authentication information.
#[deriving(Clone)]
pub struct PostgresUserInfo {
    /// The username.
    pub user: String,
    /// An optional password.
    pub password: Option<String>,
}
/// Information necessary to open a new connection to a Postgres server.
#[deriving(Clone)]
pub struct PostgresConnectParams {
    /// The target server
    pub target: PostgresConnectTarget,
    /// The target port.
    ///
    /// Defaults to 5432 if not specified.
    pub port: Option<Port>,
    /// The user to login as.
    ///
    /// `PostgresConnection::connect` requires a user but `cancel_query` does
    /// not.
    pub user: Option<PostgresUserInfo>,
    /// The database to connect to. Defaults to the value of `user`.
    pub database: Option<String>,
    /// Runtime parameters to be passed to the Postgres backend.
    pub options: Vec<(String, String)>,
}
/// A trait implemented by types that can be converted into a
/// `PostgresConnectParams`.
pub trait IntoConnectParams {
    /// Converts the value of `self` into a `PostgresConnectParams`.
    fn into_connect_params(self) -> Result<PostgresConnectParams, PostgresConnectError>;
}
impl IntoConnectParams for PostgresConnectParams {
    // Identity conversion: the value is already a parameter struct.
    fn into_connect_params(self) -> Result<PostgresConnectParams, PostgresConnectError> {
        Ok(self)
    }
}
impl<'a> IntoConnectParams for &'a str {
    // Parses the string as a URL, then defers to the Url implementation.
    fn into_connect_params(self) -> Result<PostgresConnectParams, PostgresConnectError> {
        match Url::parse(self) {
            Ok(url) => url.into_connect_params(),
            Err(err) => return Err(InvalidUrl(err)),
        }
    }
}
impl IntoConnectParams for Url {
    fn into_connect_params(self) -> Result<PostgresConnectParams, PostgresConnectError> {
        // Destructure the URL; query pairs become backend runtime options.
        let Url {
            host,
            port,
            user,
            path: url::Path { path, query: options, .. },
            ..
        } = self;
        // A percent-decoded host beginning with '/' denotes a Unix
        // socket directory rather than a TCP hostname.
        let maybe_path = match url::decode_component(host.as_slice()) {
            Ok(path) => path,
            Err(err) => return Err(InvalidUrl(err)),
        };
        let target = if maybe_path.as_slice().starts_with("/") {
            TargetUnix(Path::new(maybe_path))
        } else {
            TargetTcp(host)
        };
        let user = match user {
            Some(UserInfo { user, pass }) =>
                Some(PostgresUserInfo { user: user, password: pass }),
            None => None,
        };
        let database = if !path.is_empty() {
            // path contains the leading /; strip it off for the db name.
            let (_, path) = path.as_slice().slice_shift_char();
            Some(path.to_string())
        } else {
            None
        };
        Ok(PostgresConnectParams {
            target: target,
            port: port,
            user: user,
            database: database,
            options: options,
        })
    }
}
/// Trait for types that can handle Postgres notice messages
pub trait PostgresNoticeHandler {
    /// Handle a Postgres notice message
    fn handle(&mut self, notice: PostgresDbError);
}
/// A notice handler which logs at the `info` level.
///
/// This is the default handler used by a `PostgresConnection`.
pub struct DefaultNoticeHandler;
impl PostgresNoticeHandler for DefaultNoticeHandler {
    fn handle(&mut self, notice: PostgresDbError) {
        // Notices are informational; log the severity and message only.
        info!("{}: {}", notice.severity, notice.message);
    }
}
/// An asynchronous notification
pub struct PostgresNotification {
    /// The process ID of the notifying backend process
    pub pid: u32,
    /// The name of the channel that the notify has been raised on
    pub channel: String,
    /// The "payload" string passed from the notifying process
    pub payload: String,
}
/// An iterator over asynchronous notifications
pub struct PostgresNotifications<'conn> {
    conn: &'conn PostgresConnection
}
impl<'conn> Iterator<PostgresNotification> for PostgresNotifications<'conn> {
    /// Returns the oldest pending notification or `None` if there are none.
    ///
    /// ## Note
    ///
    /// `next` may return `Some` notification after returning `None` if a new
    /// notification was received.
    fn next(&mut self) -> Option<PostgresNotification> {
        // Drains the connection's queued notifications only; it does
        // not block waiting for new ones to arrive.
        self.conn.conn.borrow_mut().notifications.pop_front()
    }
}
/// Contains information necessary to cancel queries for a session
pub struct PostgresCancelData {
    /// The process ID of the session
    pub process_id: u32,
    /// The secret key for the session
    pub secret_key: u32,
}
/// Attempts to cancel an in-progress query.
///
/// The backend provides no information about whether a cancellation attempt
/// was successful or not. An error will only be returned if the driver was
/// unable to connect to the database.
///
/// A `PostgresCancelData` object can be created via
/// `PostgresConnection::cancel_data`. The object can cancel any query made on
/// that connection.
///
/// Only the host and port of the connection info are used. See
/// `PostgresConnection::connect` for details of the `params` argument.
///
/// ## Example
///
/// ```rust,no_run
/// # use postgres::{PostgresConnection, NoSsl};
/// # let url = "";
/// let conn = PostgresConnection::connect(url, &NoSsl).unwrap();
/// let cancel_data = conn.cancel_data();
/// spawn(proc() {
///     conn.execute("SOME EXPENSIVE QUERY", []).unwrap();
/// });
/// # let _ =
/// postgres::cancel_query(url, &NoSsl, cancel_data);
/// ```
pub fn cancel_query<T>(params: T, ssl: &SslMode, data: PostgresCancelData)
        -> Result<(), PostgresConnectError> where T: IntoConnectParams {
    let params = try!(params.into_connect_params());
    let mut socket = try!(io::initialize_stream(&params, ssl));
    // A cancel request is fire-and-forget: write the message, flush, done.
    try_pg_conn!(socket.write_message(&CancelRequest {
        code: message::CANCEL_CODE,
        process_id: data.process_id,
        secret_key: data.secret_key
    }));
    try_pg_conn!(socket.flush());
    Ok(())
}
// Shared implementation state behind a PostgresConnection.
struct InnerPostgresConnection {
    stream: BufferedStream<MaybeSslStream<InternalStream>>,
    // Monotonic counter; presumably used to name prepared statements —
    // confirm against the statement-preparation code.
    next_stmt_id: uint,
    notice_handler: Box<PostgresNoticeHandler+Send>,
    // Queue of asynchronous notifications not yet consumed (drained by
    // PostgresNotifications::next).
    notifications: RingBuf<PostgresNotification>,
    // Backend process id / secret key captured during connect.
    cancel_data: PostgresCancelData,
    // Cache of type names for OIDs the driver doesn't know statically.
    unknown_types: HashMap<Oid, String>,
    // Set when the wire protocol state becomes unrecoverable.
    desynchronized: bool,
    // True once the connection has been explicitly finished.
    finished: bool,
    // Current transaction nesting depth.
    trans_depth: u32,
    // Holds CANARY while the connection is alive.
    canary: u32,
}
impl Drop for InnerPostgresConnection {
    fn drop(&mut self) {
        // Best-effort cleanup if the user didn't explicitly finish the
        // connection; any error from finish_inner is ignored.
        if !self.finished {
            let _ = self.finish_inner();
        }
    }
}
impl InnerPostgresConnection {
/// Opens a new connection: initializes the (possibly SSL) stream,
/// sends the startup message, performs authentication, and waits for
/// the backend to signal readiness.
fn connect<T>(params: T, ssl: &SslMode)
        -> Result<InnerPostgresConnection, PostgresConnectError>
        where T: IntoConnectParams {
    let params = try!(params.into_connect_params());
    // Fixed a mojibake-corrupted argument here: `&params` had been
    // garbled into an HTML paragraph entity.
    let stream = try!(io::initialize_stream(&params, ssl));
    let PostgresConnectParams {
        user,
        database,
        mut options,
        ..
    } = params;
    let user = match user {
        Some(user) => user,
        None => return Err(MissingUser),
    };
    let mut conn = InnerPostgresConnection {
        stream: BufferedStream::new(stream),
        next_stmt_id: 0,
        notice_handler: box DefaultNoticeHandler,
        notifications: RingBuf::new(),
        cancel_data: PostgresCancelData { process_id: 0, secret_key: 0 },
        unknown_types: HashMap::new(),
        desynchronized: false,
        finished: false,
        trans_depth: 0,
        canary: CANARY,
    };
    options.push(("client_encoding".to_string(), "UTF8".to_string()));
    // Postgres uses the value of TimeZone as the time zone for TIMESTAMP
    // WITH TIME ZONE values. Timespec converts to GMT internally.
    options.push(("TimeZone".to_string(), "GMT".to_string()));
    // We have to clone here since we need the user again for auth
    options.push(("user".to_string(), user.user.clone()));
    match database {
        Some(database) => options.push(("database".to_string(), database)),
        None => {}
    }
    try_pg_conn!(conn.write_messages([StartupMessage {
        version: message::PROTOCOL_VERSION,
        parameters: options.as_slice()
    }]));
    try!(conn.handle_auth(user));
    // Consume backend messages until ReadyForQuery, capturing the
    // cancellation key data along the way.
    loop {
        match try_pg_conn!(conn.read_message()) {
            BackendKeyData { process_id, secret_key } => {
                conn.cancel_data.process_id = process_id;
                conn.cancel_data.secret_key = secret_key;
            }
            ReadyForQuery { .. } => break,
            ErrorResponse { fields } =>
                return Err(PgConnectDbError(PostgresDbError::new(fields))),
            _ => return Err(PgConnectBadResponse),
        }
    }
    Ok(conn)
}
fn write_messages(&mut self, messages: &[FrontendMessage]) -> IoResult<()> {
debug_assert!(!self.desynchronized);
for message in messages.iter() {
try_desync!(self, self.stream.write_message(message));
}
Ok(try_desync!(self, self.stream.flush()))
}
fn read_message(&mut self) -> IoResult<BackendMessage> {
debug_assert!(!self.desynchronized);
loop {
match try_desync!(self, self.stream.read_message()) {
NoticeResponse { fields } => {
self.notice_handler.handle(PostgresDbError::new(fields))
}
NotificationResponse { pid, channel, payload } => {
self.notifications.push(PostgresNotification {
pid: pid,
channel: channel,
payload: payload
})
}
ParameterStatus { parameter, value } => {
debug!("Parameter {} = {}", parameter, value)
}
val => return Ok(val)
}
}
}
fn handle_auth(&mut self, user: PostgresUserInfo) -> Result<(), PostgresConnectError> {
match try_pg_conn!(self.read_message()) {
AuthenticationOk => return Ok(()),
AuthenticationCleartextPassword => {
let pass = match user.password {
Some(pass) => pass,
None => return Err(MissingPassword)
};
try_pg_conn!(self.write_messages([PasswordMessage {
password: pass.as_slice(),
}]));
}
AuthenticationMD5Password { salt } => {
let pass = match user.password {
Some(pass) => pass,
None => return Err(MissingPassword)
};
let hasher = Hasher::new(MD5);
hasher.update(pass.as_bytes());
hasher.update(user.user.as_bytes());
let output = hasher.final().as_slice().to_hex();
let hasher = Hasher::new(MD5);
hasher.update(output.as_bytes());
hasher.update(salt);
let output = format!("md5{}",
hasher.final().as_slice().to_hex());
try_pg_conn!(self.write_messages([PasswordMessage {
password: output.as_slice()
}]));
}
AuthenticationKerberosV5
| AuthenticationSCMCredential
| AuthenticationGSS
| AuthenticationSSPI => return Err(UnsupportedAuthentication),
ErrorResponse { fields } => return Err(PgConnectDbError(PostgresDbError::new(fields))),
_ => {
self.desynchronized = true;
return Err(PgConnectBadResponse);
}
}
match try_pg_conn!(self.read_message()) {
AuthenticationOk => Ok(()),
ErrorResponse { fields } => Err(PgConnectDbError(PostgresDbError::new(fields))),
_ => {
self.desynchronized = true;
return Err(PgConnectBadResponse);
}
}
}
fn set_notice_handler(&mut self, handler: Box<PostgresNoticeHandler+Send>)
-> Box<PostgresNoticeHandler+Send> {
mem::replace(&mut self.notice_handler, handler)
}
fn prepare<'a>(&mut self, query: &str, conn: &'a PostgresConnection)
-> PostgresResult<PostgresStatement<'a>> {
let stmt_name = format!("s{}", self.next_stmt_id);
self.next_stmt_id += 1;
try_pg!(self.write_messages([
Parse {
name: stmt_name.as_slice(),
query: query,
param_types: []
},
Describe {
variant: 'S' as u8,
name: stmt_name.as_slice(),
},
Sync]));
match try_pg!(self.read_message()) {
ParseComplete => {}
ErrorResponse { fields } => {
try!(self.wait_for_ready());
return Err(PgDbError(PostgresDbError::new(fields)));
}
_ => bad_response!(self),
}
let mut param_types: Vec<PostgresType> = match try_pg!(self.read_message()) {
ParameterDescription { types } => {
types.iter().map(|ty| PostgresType::from_oid(*ty)).collect()
}
_ => bad_response!(self),
};
let mut result_desc: Vec<ResultDescription> = match try_pg!(self.read_message()) {
RowDescription { descriptions } => {
descriptions.move_iter().map(|desc| {
let RowDescriptionEntry { name, type_oid, .. } = desc;
ResultDescription {
name: name,
ty: PostgresType::from_oid(type_oid)
}
}).collect()
}
NoData => vec![],
_ => bad_response!(self)
};
try!(self.wait_for_ready());
// now that the connection is ready again, get unknown type names
try!(self.set_type_names(param_types.mut_iter()));
try!(self.set_type_names(result_desc.mut_iter().map(|d| &mut d.ty)));
Ok(PostgresStatement {
conn: conn,
name: stmt_name,
param_types: param_types,
result_desc: result_desc,
next_portal_id: Cell::new(0),
finished: false,
})
}
fn set_type_names<'a, I: Iterator<&'a mut PostgresType>>(&mut self, mut it: I)
-> PostgresResult<()> {
for ty in it {
match *ty {
PgUnknownType { oid, ref mut name } => *name = try!(self.get_type_name(oid)),
_ => {}
}
}
Ok(())
}
fn get_type_name(&mut self, oid: Oid) -> PostgresResult<String> {
match self.unknown_types.find(&oid) {
Some(name) => return Ok(name.clone()),
None => {}
}
let name = try!(self.quick_query(format!("SELECT typname FROM pg_type \
WHERE oid={}", oid).as_slice()))
.move_iter().next().unwrap().move_iter().next().unwrap().unwrap();
self.unknown_types.insert(oid, name.clone());
Ok(name)
}
fn is_desynchronized(&self) -> bool {
self.desynchronized
}
fn canary(&self) -> u32 {
self.canary
}
fn wait_for_ready(&mut self) -> PostgresResult<()> {
match try_pg!(self.read_message()) {
ReadyForQuery { .. } => Ok(()),
_ => bad_response!(self)
}
}
fn quick_query(&mut self, query: &str) -> PostgresResult<Vec<Vec<Option<String>>>> {
check_desync!(self);
try_pg!(self.write_messages([Query { query: query }]));
let mut result = vec![];
loop {
match try_pg!(self.read_message()) {
ReadyForQuery { .. } => break,
DataRow { row } => {
result.push(row.move_iter().map(|opt| {
opt.map(|b| String::from_utf8(b).unwrap())
}).collect());
}
ErrorResponse { fields } => {
try!(self.wait_for_ready());
return Err(PgDbError(PostgresDbError::new(fields)));
}
_ => {}
}
}
Ok(result)
}
fn finish_inner(&mut self) -> PostgresResult<()> {
check_desync!(self);
self.canary = 0;
try_pg!(self.write_messages([Terminate]));
Ok(())
}
}
/// A connection to a Postgres database.
pub struct PostgresConnection {
    // Interior mutability lets `&self` methods drive the protocol stream.
    conn: RefCell<InnerPostgresConnection>
}
impl PostgresConnection {
    /// Creates a new connection to a Postgres database.
    ///
    /// Most applications can use a URL string in the normal format:
    ///
    /// ```notrust
    /// postgresql://user[:password]@host[:port][/database][?param1=val1[[&param2=val2]...]]
    /// ```
    ///
    /// The password may be omitted if not required. The default Postgres port
    /// (5432) is used if none is specified. The database name defaults to the
    /// username if not specified.
    ///
    /// To connect to the server via Unix sockets, `host` should be set to the
    /// absolute path of the directory containing the socket file. Since `/` is
    /// a reserved character in URLs, the path should be URL encoded. If the
    /// path contains non-UTF 8 characters, a `PostgresConnectParams` struct
    /// should be created manually and passed in. Note that Postgres does not
    /// support SSL over Unix sockets.
    ///
    /// ## Examples
    ///
    /// ```rust,no_run
    /// # use postgres::{PostgresConnection, NoSsl};
    /// # let _ = || {
    /// let url = "postgresql://postgres:hunter2@localhost:2994/foodb";
    /// let conn = try!(PostgresConnection::connect(url, &NoSsl));
    /// # Ok(()) };
    /// ```
    ///
    /// ```rust,no_run
    /// # use postgres::{PostgresConnection, NoSsl};
    /// # let _ = || {
    /// let url = "postgresql://postgres@%2Frun%2Fpostgres";
    /// let conn = try!(PostgresConnection::connect(url, &NoSsl));
    /// # Ok(()) };
    /// ```
    ///
    /// ```rust,no_run
    /// # use postgres::{PostgresConnection, PostgresUserInfo, PostgresConnectParams, NoSsl, TargetUnix};
    /// # let _ = || {
    /// # let some_crazy_path = Path::new("");
    /// let params = PostgresConnectParams {
    ///     target: TargetUnix(some_crazy_path),
    ///     port: None,
    ///     user: Some(PostgresUserInfo {
    ///         user: "postgres".to_string(),
    ///         password: None
    ///     }),
    ///     database: None,
    ///     options: vec![],
    /// };
    /// let conn = try!(PostgresConnection::connect(params, &NoSsl));
    /// # Ok(()) };
    /// ```
    pub fn connect<T>(params: T, ssl: &SslMode) -> Result<PostgresConnection, PostgresConnectError>
                      where T: IntoConnectParams {
        InnerPostgresConnection::connect(params, ssl).map(|conn| {
            PostgresConnection { conn: RefCell::new(conn) }
        })
    }

    /// Sets the notice handler for the connection, returning the old handler.
    pub fn set_notice_handler(&self, handler: Box<PostgresNoticeHandler+Send>)
                              -> Box<PostgresNoticeHandler+Send> {
        self.conn.borrow_mut().set_notice_handler(handler)
    }

    /// Returns an iterator over asynchronous notification messages.
    ///
    /// Use the `LISTEN` command to register this connection for notifications.
    pub fn notifications<'a>(&'a self) -> PostgresNotifications<'a> {
        PostgresNotifications {
            conn: self
        }
    }

    /// Creates a new prepared statement.
    ///
    /// A statement may contain parameters, specified by `$n` where `n` is the
    /// index of the parameter in the list provided at execution time,
    /// 1-indexed.
    ///
    /// The statement is associated with the connection that created it and may
    /// not outlive that connection.
    ///
    /// ## Example
    ///
    /// ```rust,no_run
    /// # use postgres::{PostgresConnection, NoSsl};
    /// # let conn = PostgresConnection::connect("", &NoSsl).unwrap();
    /// let maybe_stmt = conn.prepare("SELECT foo FROM bar WHERE baz = $1");
    /// let stmt = match maybe_stmt {
    ///     Ok(stmt) => stmt,
    ///     Err(err) => fail!("Error preparing statement: {}", err)
    /// };
    /// ```
    pub fn prepare<'a>(&'a self, query: &str) -> PostgresResult<PostgresStatement<'a>> {
        let mut conn = self.conn.borrow_mut();
        // Statements must be prepared through the active transaction object,
        // if any, so their lifetimes are scoped correctly.
        if conn.trans_depth != 0 {
            return Err(PgWrongTransaction);
        }
        conn.prepare(query, self)
    }

    /// Begins a new transaction.
    ///
    /// Returns a `PostgresTransaction` object which should be used instead of
    /// the connection for the duration of the transaction. The transaction
    /// is active until the `PostgresTransaction` object falls out of scope.
    ///
    /// ## Note
    /// A transaction will roll back by default. Use the `set_commit` method to
    /// set the transaction to commit.
    ///
    /// ## Example
    ///
    /// ```rust,no_run
    /// # use postgres::{PostgresConnection, NoSsl};
    /// # fn foo() -> Result<(), postgres::error::PostgresError> {
    /// # let conn = PostgresConnection::connect("", &NoSsl).unwrap();
    /// let trans = try!(conn.transaction());
    /// try!(trans.execute("UPDATE foo SET bar = 10", []));
    /// // ...
    ///
    /// trans.set_commit();
    /// try!(trans.finish());
    /// # Ok(())
    /// # }
    /// ```
    pub fn transaction<'a>(&'a self) -> PostgresResult<PostgresTransaction<'a>> {
        check_desync!(self);
        // Nested transactions are begun through an existing
        // `PostgresTransaction`, not through the connection.
        if self.conn.borrow().trans_depth != 0 {
            return Err(PgWrongTransaction);
        }
        try!(self.quick_query("BEGIN"));
        self.conn.borrow_mut().trans_depth += 1;
        Ok(PostgresTransaction {
            conn: self,
            commit: Cell::new(false),
            depth: 1,
            finished: false,
        })
    }

    /// A convenience function for queries that are only run once.
    ///
    /// If an error is returned, it could have come from either the preparation
    /// or execution of the statement.
    ///
    /// On success, returns the number of rows modified or 0 if not applicable.
    pub fn execute(&self, query: &str, params: &[&ToSql]) -> PostgresResult<uint> {
        self.prepare(query).and_then(|stmt| stmt.execute(params))
    }

    /// Execute a sequence of SQL statements.
    ///
    /// Statements should be separated by `;` characters. If an error occurs,
    /// execution of the sequence will stop at that point. This is intended for
    /// execution of batches of non-dynamic statements - for example, creation
    /// of a schema for a fresh database.
    ///
    /// ## Warning
    ///
    /// Prepared statements should be used for any SQL statement which contains
    /// user-specified data, as it provides functionality to safely embed that
    /// data in the statment. Do not form statements via string concatenation
    /// and feed them into this method.
    ///
    /// ## Example
    ///
    /// ```rust,no_run
    /// # use postgres::{PostgresConnection, PostgresResult};
    /// fn init_db(conn: &PostgresConnection) -> PostgresResult<()> {
    ///     conn.batch_execute("
    ///         CREATE TABLE person (
    ///             id SERIAL PRIMARY KEY,
    ///             name NOT NULL
    ///         );
    ///
    ///         CREATE TABLE purchase (
    ///             id SERIAL PRIMARY KEY,
    ///             person INT NOT NULL REFERENCES person (id),
    ///             time TIMESTAMPTZ NOT NULL,
    ///         );
    ///
    ///         CREATE INDEX ON purchase (time);
    ///         ")
    /// }
    /// ```
    pub fn batch_execute(&self, query: &str) -> PostgresResult<()> {
        let mut conn = self.conn.borrow_mut();
        if conn.trans_depth != 0 {
            return Err(PgWrongTransaction);
        }
        conn.quick_query(query).map(|_| ())
    }

    /// Returns information used to cancel pending queries.
    ///
    /// Used with the `cancel_query` function. The object returned can be used
    /// to cancel any query executed by the connection it was created from.
    pub fn cancel_data(&self) -> PostgresCancelData {
        self.conn.borrow().cancel_data
    }

    /// Returns whether or not the stream has been desynchronized due to an
    /// error in the communication channel with the server.
    ///
    /// If this has occurred, all further queries will immediately return an
    /// error.
    pub fn is_desynchronized(&self) -> bool {
        self.conn.borrow().is_desynchronized()
    }

    /// Consumes the connection, closing it.
    ///
    /// Functionally equivalent to the `Drop` implementation for
    /// `PostgresConnection` except that it returns any error encountered to
    /// the caller.
    pub fn finish(self) -> PostgresResult<()> {
        let mut conn = self.conn.borrow_mut();
        conn.finished = true;
        conn.finish_inner()
    }

    // Returns the inner connection's canary value (zero once finished).
    fn canary(&self) -> u32 {
        self.conn.borrow().canary()
    }

    // Private delegates to the inner connection; these exist so sibling types
    // holding a `&PostgresConnection` can drive the protocol.
    fn quick_query(&self, query: &str) -> PostgresResult<Vec<Vec<Option<String>>>> {
        self.conn.borrow_mut().quick_query(query)
    }

    fn wait_for_ready(&self) -> PostgresResult<()> {
        self.conn.borrow_mut().wait_for_ready()
    }

    fn read_message(&self) -> IoResult<BackendMessage> {
        self.conn.borrow_mut().read_message()
    }

    fn write_messages(&self, messages: &[FrontendMessage]) -> IoResult<()> {
        self.conn.borrow_mut().write_messages(messages)
    }
}
/// Specifies the SSL support requested for a new connection.
pub enum SslMode {
    /// The connection will not use SSL.
    NoSsl,
    /// The connection will use SSL if the backend supports it, configured by
    /// the provided context.
    PreferSsl(SslContext),
    /// The connection must use SSL, configured by the provided context.
    RequireSsl(SslContext)
}
/// Represents a transaction on a database connection.
///
/// The transaction will roll back by default.
pub struct PostgresTransaction<'conn> {
    // The connection this transaction runs on.
    conn: &'conn PostgresConnection,
    // Whether to commit (true) or roll back (false) at completion.
    commit: Cell<bool>,
    // Nesting depth: 1 for a top-level transaction, >1 for savepoints.
    depth: u32,
    // Set once `finish_inner` has run so `Drop` does not run it again.
    finished: bool,
}
#[unsafe_destructor]
impl<'conn> Drop for PostgresTransaction<'conn> {
    /// Commits or rolls back on drop unless the transaction was finished
    /// explicitly; any error is discarded.
    fn drop(&mut self) {
        if self.finished {
            return;
        }
        let _ = self.finish_inner();
    }
}
impl<'conn> PostgresTransaction<'conn> {
    /// Ends this transaction level: the outermost level issues
    /// COMMIT/ROLLBACK, nested levels release or roll back to their
    /// savepoint, depending on the `commit` flag.
    ///
    /// NOTE(review): all nested levels use the same savepoint name `sp`,
    /// apparently relying on savepoint-name shadowing — confirm against
    /// Postgres savepoint semantics.
    fn finish_inner(&mut self) -> PostgresResult<()> {
        debug_assert!(self.depth == self.conn.conn.borrow().trans_depth);
        let query = match (self.commit.get(), self.depth != 1) {
            (false, true) => "ROLLBACK TO sp",
            (false, false) => "ROLLBACK",
            (true, true) => "RELEASE sp",
            (true, false) => "COMMIT",
        };
        self.conn.conn.borrow_mut().trans_depth -= 1;
        self.conn.quick_query(query).map(|_| ())
    }

    /// Like `PostgresConnection::prepare`.
    pub fn prepare<'a>(&'a self, query: &str) -> PostgresResult<PostgresStatement<'a>> {
        // Only the innermost active transaction may be used.
        if self.conn.conn.borrow().trans_depth != self.depth {
            return Err(PgWrongTransaction);
        }
        self.conn.conn.borrow_mut().prepare(query, self.conn)
    }

    /// Like `PostgresConnection::execute`.
    pub fn execute(&self, query: &str, params: &[&ToSql]) -> PostgresResult<uint> {
        self.prepare(query).and_then(|s| s.execute(params))
    }

    /// Like `PostgresConnection::batch_execute`.
    pub fn batch_execute(&self, query: &str) -> PostgresResult<()> {
        let mut conn = self.conn.conn.borrow_mut();
        if conn.trans_depth != self.depth {
            return Err(PgWrongTransaction);
        }
        conn.quick_query(query).map(|_| ())
    }

    /// Like `PostgresConnection::transaction`.
    pub fn transaction<'a>(&'a self) -> PostgresResult<PostgresTransaction<'a>> {
        check_desync!(self.conn);
        if self.conn.conn.borrow().trans_depth != self.depth {
            return Err(PgWrongTransaction);
        }
        try!(self.conn.quick_query("SAVEPOINT sp"));
        self.conn.conn.borrow_mut().trans_depth += 1;
        Ok(PostgresTransaction {
            conn: self.conn,
            commit: Cell::new(false),
            depth: self.depth + 1,
            finished: false,
        })
    }

    /// Executes a prepared statement, returning a lazily loaded iterator over
    /// the resulting rows.
    ///
    /// No more than `row_limit` rows will be stored in memory at a time. Rows
    /// will be pulled from the database in batches of `row_limit` as needed.
    /// If `row_limit` is less than or equal to 0, `lazy_query` is equivalent
    /// to `query`.
    pub fn lazy_query<'trans, 'stmt>(&'trans self,
                                     stmt: &'stmt PostgresStatement,
                                     params: &[&ToSql],
                                     row_limit: i32)
                                     -> PostgresResult<PostgresLazyRows<'trans, 'stmt>> {
        // The statement must belong to this transaction's connection.
        if self.conn as *const _ != stmt.conn as *const _ {
            return Err(PgWrongConnection);
        }
        check_desync!(self.conn);
        stmt.lazy_query(row_limit, params).map(|result| {
            PostgresLazyRows {
                _trans: self,
                result: result
            }
        })
    }

    /// Determines if the transaction is currently set to commit or roll back.
    pub fn will_commit(&self) -> bool {
        self.commit.get()
    }

    /// Sets the transaction to commit at its completion.
    pub fn set_commit(&self) {
        self.commit.set(true);
    }

    /// Sets the transaction to roll back at its completion.
    pub fn set_rollback(&self) {
        self.commit.set(false);
    }

    /// A convenience method which consumes and commits a transaction.
    pub fn commit(self) -> PostgresResult<()> {
        self.set_commit();
        self.finish()
    }

    /// Consumes the transaction, commiting or rolling it back as appropriate.
    ///
    /// Functionally equivalent to the `Drop` implementation of
    /// `PostgresTransaction` except that it returns any error to the caller.
    pub fn finish(mut self) -> PostgresResult<()> {
        self.finished = true;
        self.finish_inner()
    }
}
/// A prepared statement
pub struct PostgresStatement<'conn> {
    // The connection this statement was prepared on.
    conn: &'conn PostgresConnection,
    // Server-side statement name ("s0", "s1", ...).
    name: String,
    // Types of the statement's parameters, as described by the backend.
    param_types: Vec<PostgresType>,
    // Name and type of each result column.
    result_desc: Vec<ResultDescription>,
    // Counter used to generate unique portal names for lazy queries.
    next_portal_id: Cell<uint>,
    // Set once `finish_inner` has run so `Drop` does not run it again.
    finished: bool,
}
#[unsafe_destructor]
impl<'conn> Drop for PostgresStatement<'conn> {
    /// Closes the server-side statement on drop unless it was finished
    /// explicitly; any error is discarded.
    fn drop(&mut self) {
        if self.finished {
            return;
        }
        let _ = self.finish_inner();
    }
}
impl<'conn> PostgresStatement<'conn> {
fn finish_inner(&mut self) -> PostgresResult<()> {
check_desync!(self.conn);
try_pg!(self.conn.write_messages([
Close {
variant: 'S' as u8,
name: self.name.as_slice()
},
Sync]));
loop {
match try_pg!(self.conn.read_message()) {
ReadyForQuery { .. } => break,
ErrorResponse { fields } => {
try!(self.conn.wait_for_ready());
return Err(PgDbError(PostgresDbError::new(fields)));
}
_ => {}
}
}
Ok(())
}
fn inner_execute(&self, portal_name: &str, row_limit: i32, params: &[&ToSql])
-> PostgresResult<()> {
if self.param_types.len() != params.len() {
return Err(PgWrongParamCount {
expected: self.param_types.len(),
actual: params.len(),
});
}
let mut formats = vec![];
let mut values = vec![];
for (param, ty) in params.iter().zip(self.param_types.iter()) {
let (format, value) = try!(param.to_sql(ty));
formats.push(format as i16);
values.push(value);
};
let result_formats = Vec::from_elem(self.result_desc.len(), Binary as i16);
try_pg!(self.conn.write_messages([
Bind {
portal: portal_name,
statement: self.name.as_slice(),
formats: formats.as_slice(),
values: values.as_slice(),
result_formats: result_formats.as_slice()
},
Execute {
portal: portal_name,
max_rows: row_limit
},
Sync]));
match try_pg!(self.conn.read_message()) {
BindComplete => Ok(()),
ErrorResponse { fields } => {
try!(self.conn.wait_for_ready());
Err(PgDbError(PostgresDbError::new(fields)))
}
_ => {
self.conn.conn.borrow_mut().desynchronized = true;
return Err(PgBadResponse);
}
}
}
fn lazy_query<'a>(&'a self, row_limit: i32, params: &[&ToSql])
-> PostgresResult<PostgresRows<'a>> {
let id = self.next_portal_id.get();
self.next_portal_id.set(id + 1);
let portal_name = format!("{}p{}", self.name, id);
try!(self.inner_execute(portal_name.as_slice(), row_limit, params));
let mut result = PostgresRows {
stmt: self,
name: portal_name,
data: RingBuf::new(),
row_limit: row_limit,
more_rows: true,
finished: false,
};
try!(result.read_rows())
Ok(result)
}
/// Returns a slice containing the expected parameter types.
pub fn param_types(&self) -> &[PostgresType] {
self.param_types.as_slice()
}
/// Returns a slice describing the columns of the result of the query.
pub fn result_descriptions(&self) -> &[ResultDescription] {
self.result_desc.as_slice()
}
/// Executes the prepared statement, returning the number of rows modified.
///
/// If the statement does not modify any rows (e.g. SELECT), 0 is returned.
///
/// ## Example
///
/// ```rust,no_run
/// # use postgres::{PostgresConnection, NoSsl};
/// # let conn = PostgresConnection::connect("", &NoSsl).unwrap();
/// # let bar = 1i32;
/// # let baz = true;
/// let stmt = conn.prepare("UPDATE foo SET bar = $1 WHERE baz = $2").unwrap();
/// match stmt.execute([&bar, &baz]) {
/// Ok(count) => println!("{} row(s) updated", count),
/// Err(err) => println!("Error executing query: {}", err)
/// }
pub fn execute(&self, params: &[&ToSql]) -> PostgresResult<uint> {
check_desync!(self.conn);
try!(self.inner_execute("", 0, params));
let num;
loop {
match try_pg!(self.conn.read_message()) {
DataRow { .. } => {}
ErrorResponse { fields } => {
try!(self.conn.wait_for_ready());
return Err(PgDbError(PostgresDbError::new(fields)));
}
CommandComplete { tag } => {
let s = tag.as_slice().split(' ').last().unwrap();
num = FromStr::from_str(s).unwrap_or(0);
break;
}
EmptyQueryResponse => {
num = 0;
break;
}
_ => {
self.conn.conn.borrow_mut().desynchronized = true;
return Err(PgBadResponse);
}
}
}
try!(self.conn.wait_for_ready());
Ok(num)
}
/// Executes the prepared statement, returning an iterator over the
/// resulting rows.
///
/// ## Example
///
/// ```rust,no_run
/// # use postgres::{PostgresConnection, NoSsl};
/// # let conn = PostgresConnection::connect("", &NoSsl).unwrap();
/// let stmt = conn.prepare("SELECT foo FROM bar WHERE baz = $1").unwrap();
/// # let baz = true;
/// let mut rows = match stmt.query([&baz]) {
/// Ok(rows) => rows,
/// Err(err) => fail!("Error running query: {}", err)
/// };
/// for row in rows {
/// let foo: i32 = row.get("foo");
/// println!("foo: {}", foo);
/// }
/// ```
pub fn query<'a>(&'a self, params: &[&ToSql]) -> PostgresResult<PostgresRows<'a>> {
check_desync!(self.conn);
self.lazy_query(0, params)
}
/// Consumes the statement, clearing it from the Postgres session.
///
/// Functionally identical to the `Drop` implementation of the
/// `PostgresStatement` except that it returns any error to the caller.
pub fn finish(mut self) -> PostgresResult<()> {
self.finished = true;
self.finish_inner()
}
}
/// Information about a column of the result of a query.
#[deriving(PartialEq, Eq)]
pub struct ResultDescription {
    /// The name of the column.
    pub name: String,
    /// The type of the data in the column.
    pub ty: PostgresType
}
/// An iterator over the resulting rows of a query.
pub struct PostgresRows<'stmt> {
    // The statement whose portal this iterator reads from.
    stmt: &'stmt PostgresStatement<'stmt>,
    // Server-side portal name for this result set.
    name: String,
    // Rows already fetched from the backend but not yet yielded.
    data: RingBuf<Vec<Option<Vec<u8>>>>,
    // Batch size for lazy queries; 0 means fetch everything at once.
    row_limit: i32,
    // True while the backend may still have more rows for this portal.
    more_rows: bool,
    // Set once `finish_inner` has run so `Drop` does not run it again.
    finished: bool,
}
#[unsafe_destructor]
impl<'stmt> Drop for PostgresRows<'stmt> {
    /// Closes the server-side portal on drop unless it was finished
    /// explicitly; any error is discarded.
    fn drop(&mut self) {
        if self.finished {
            return;
        }
        let _ = self.finish_inner();
    }
}
impl<'stmt> PostgresRows<'stmt> {
    /// Closes the portal on the backend and drains messages until the
    /// connection is ready for the next query.
    fn finish_inner(&mut self) -> PostgresResult<()> {
        check_desync!(self.stmt.conn);
        try_pg!(self.stmt.conn.write_messages([
            Close {
                variant: 'P' as u8,
                name: self.name.as_slice()
            },
            Sync]));
        loop {
            match try_pg!(self.stmt.conn.read_message()) {
                ReadyForQuery { .. } => break,
                ErrorResponse { fields } => {
                    try!(self.stmt.conn.wait_for_ready());
                    return Err(PgDbError(PostgresDbError::new(fields)));
                }
                _ => {}
            }
        }
        Ok(())
    }

    /// Buffers one batch of rows from the backend, updating `more_rows`
    /// according to whether the portal completed or was merely suspended.
    fn read_rows(&mut self) -> PostgresResult<()> {
        loop {
            match try_pg!(self.stmt.conn.read_message()) {
                EmptyQueryResponse | CommandComplete { .. } => {
                    self.more_rows = false;
                    break;
                },
                PortalSuspended => {
                    // Hit the row limit; the portal can be re-executed for
                    // the next batch.
                    self.more_rows = true;
                    break;
                },
                DataRow { row } => self.data.push(row),
                ErrorResponse { fields } => {
                    try!(self.stmt.conn.wait_for_ready());
                    return Err(PgDbError(PostgresDbError::new(fields)));
                }
                _ => {
                    self.stmt.conn.conn.borrow_mut().desynchronized = true;
                    return Err(PgBadResponse);
                }
            }
        }
        self.stmt.conn.wait_for_ready()
    }

    /// Resumes the suspended portal to fetch up to `row_limit` more rows.
    fn execute(&mut self) -> PostgresResult<()> {
        try_pg!(self.stmt.conn.write_messages([
            Execute {
                portal: self.name.as_slice(),
                max_rows: self.row_limit
            },
            Sync]));
        self.read_rows()
    }

    /// Consumes the `PostgresRows`, cleaning up associated state.
    ///
    /// Functionally identical to the `Drop` implementation on `PostgresRows`
    /// except that it returns any error to the caller.
    pub fn finish(mut self) -> PostgresResult<()> {
        self.finished = true;
        self.finish_inner()
    }

    /// Returns the next buffered row, first fetching another batch from the
    /// backend if the buffer is empty and more rows remain.
    fn try_next(&mut self) -> Option<PostgresResult<PostgresRow<'stmt>>> {
        if self.data.is_empty() && self.more_rows {
            match self.execute() {
                Ok(()) => {}
                Err(err) => return Some(Err(err))
            }
        }

        self.data.pop_front().map(|row| {
            Ok(PostgresRow {
                stmt: self.stmt,
                data: row
            })
        })
    }
}
impl<'stmt> Iterator<PostgresRow<'stmt>> for PostgresRows<'stmt> {
    #[inline]
    fn next(&mut self) -> Option<PostgresRow<'stmt>> {
        // we'll never hit the network on a non-lazy result
        match self.try_next() {
            Some(row) => Some(row.unwrap()),
            None => None,
        }
    }

    #[inline]
    fn size_hint(&self) -> (uint, Option<uint>) {
        // The buffered rows are a lower bound; an exact count is only known
        // once the backend has no more rows for this portal.
        let buffered = self.data.len();
        match self.more_rows {
            true => (buffered, None),
            false => (buffered, Some(buffered)),
        }
    }
}
/// A single result row of a query.
pub struct PostgresRow<'stmt> {
    // The statement this row came from, used for column metadata.
    stmt: &'stmt PostgresStatement<'stmt>,
    // Raw value of each column; `None` represents SQL NULL.
    data: Vec<Option<Vec<u8>>>
}
impl<'stmt> PostgresRow<'stmt> {
    /// Retrieves the contents of a field of the row.
    ///
    /// A field can be accessed by the name or index of its column, though
    /// access by index is more efficient. Rows are 0-indexed.
    ///
    /// Returns an `Error` value if the index does not reference a column or
    /// the return type is not compatible with the Postgres type.
    pub fn get_opt<I, T>(&self, idx: I) -> PostgresResult<T> where I: RowIndex, T: FromSql {
        // Resolve the caller-supplied index (name or position) to a column
        // position, then decode the raw value using the column's type.
        let idx = match idx.idx(self.stmt) {
            Some(idx) => idx,
            None => return Err(PgInvalidColumn)
        };
        FromSql::from_sql(&self.stmt.result_desc[idx].ty, &self.data[idx])
    }

    /// Retrieves the contents of a field of the row.
    ///
    /// A field can be accessed by the name or index of its column, though
    /// access by index is more efficient. Rows are 0-indexed.
    ///
    /// ## Failure
    ///
    /// Fails if the index does not reference a column or the return type is
    /// not compatible with the Postgres type.
    ///
    /// ## Example
    ///
    /// ```rust,no_run
    /// # use postgres::{PostgresConnection, NoSsl};
    /// # let conn = PostgresConnection::connect("", &NoSsl).unwrap();
    /// # let stmt = conn.prepare("").unwrap();
    /// # let mut result = stmt.query([]).unwrap();
    /// # let row = result.next().unwrap();
    /// let foo: i32 = row.get(0u);
    /// let bar: String = row.get("bar");
    /// ```
    pub fn get<I, T>(&self, idx: I) -> T where I: RowIndex + fmt::Show + Clone, T: FromSql {
        match self.get_opt(idx.clone()) {
            Ok(ok) => ok,
            Err(err) => fail!("error retrieving column {}: {}", idx, err)
        }
    }
}
impl<'stmt> Collection for PostgresRow<'stmt> {
    /// Returns the number of columns in the row.
    #[inline]
    fn len(&self) -> uint {
        self.data.len()
    }
}
/// A trait implemented by types that can index into columns of a row.
pub trait RowIndex {
    /// Returns the index of the appropriate column, or `None` if no such
    /// column exists.
    fn idx(&self, stmt: &PostgresStatement) -> Option<uint>;
}
impl RowIndex for uint {
    /// Accepts the index itself when it falls within the statement's column
    /// count.
    #[inline]
    fn idx(&self, stmt: &PostgresStatement) -> Option<uint> {
        // Columns are 0-indexed, so an index equal to the column count is
        // already out of bounds (the previous `>` comparison let it through,
        // which would cause an out-of-bounds access in `get_opt`).
        if *self >= stmt.result_desc.len() {
            None
        } else {
            Some(*self)
        }
    }
}
impl<'a> RowIndex for &'a str {
    /// Resolves a column name to its position among the statement's result
    /// columns, or `None` if no column has that name.
    #[inline]
    fn idx(&self, stmt: &PostgresStatement) -> Option<uint> {
        for (i, desc) in stmt.result_descriptions().iter().enumerate() {
            if desc.name.as_slice() == *self {
                return Some(i);
            }
        }
        None
    }
}
/// A lazily-loaded iterator over the resulting rows of a query
pub struct PostgresLazyRows<'trans, 'stmt> {
    // The underlying batched result set.
    result: PostgresRows<'stmt>,
    // Held only to tie this iterator's lifetime to the transaction.
    _trans: &'trans PostgresTransaction<'trans>,
}
// Thin wrappers delegating to the underlying `PostgresRows`.
impl<'trans, 'stmt> PostgresLazyRows<'trans, 'stmt> {
    /// Like `PostgresRows::finish`.
    #[inline]
    pub fn finish(self) -> PostgresResult<()> {
        self.result.finish()
    }
}
impl<'trans, 'stmt> Iterator<PostgresResult<PostgresRow<'stmt>>>
        for PostgresLazyRows<'trans, 'stmt> {
    /// Yields each row wrapped in a `PostgresResult`, since fetching the
    /// next batch from the backend may fail.
    #[inline]
    fn next(&mut self) -> Option<PostgresResult<PostgresRow<'stmt>>> {
        self.result.try_next()
    }

    #[inline]
    fn size_hint(&self) -> (uint, Option<uint>) {
        self.result.size_hint()
    }
}
Fix doc typo
//! Rust-Postgres is a pure-Rust frontend for the popular PostgreSQL database. It
//! exposes a high level interface in the vein of JDBC or Go's `database/sql`
//! package.
//!
//! ```rust,no_run
//! extern crate postgres;
//! extern crate time;
//!
//! use time::Timespec;
//!
//! use postgres::{PostgresConnection, NoSsl};
//!
//! struct Person {
//! id: i32,
//! name: String,
//! time_created: Timespec,
//! data: Option<Vec<u8>>
//! }
//!
//! fn main() {
//! let conn = PostgresConnection::connect("postgresql://postgres@localhost",
//! &NoSsl).unwrap();
//!
//! conn.execute("CREATE TABLE person (
//! id SERIAL PRIMARY KEY,
//! name VARCHAR NOT NULL,
//! time_created TIMESTAMP NOT NULL,
//! data BYTEA
//! )", []).unwrap();
//! let me = Person {
//! id: 0,
//! name: "Steven".to_string(),
//! time_created: time::get_time(),
//! data: None
//! };
//! conn.execute("INSERT INTO person (name, time_created, data)
//! VALUES ($1, $2, $3)",
//! [&me.name, &me.time_created, &me.data]).unwrap();
//!
//! let stmt = conn.prepare("SELECT id, name, time_created, data FROM person")
//! .unwrap();
//! for row in stmt.query([]).unwrap() {
//! let person = Person {
//! id: row.get(0u),
//! name: row.get(1u),
//! time_created: row.get(2u),
//! data: row.get(3u)
//! };
//! println!("Found person {}", person.name);
//! }
//! }
//! ```
#![doc(html_root_url="http://www.rust-ci.org/sfackler/rust-postgres/doc")]
#![feature(macro_rules, struct_variant, phase, unsafe_destructor)]
#![warn(missing_doc)]
extern crate collections;
extern crate openssl;
extern crate serialize;
extern crate time;
extern crate phf;
#[phase(plugin)]
extern crate phf_mac;
extern crate url;
#[phase(plugin, link)]
extern crate log;
use collections::{Deque, RingBuf};
use url::{UserInfo, Url};
use openssl::crypto::hash::{MD5, Hasher};
use openssl::ssl::SslContext;
use serialize::hex::ToHex;
use std::cell::{Cell, RefCell};
use std::collections::HashMap;
use std::from_str::FromStr;
use std::io::{BufferedStream, IoResult};
use std::io::net::ip::Port;
use std::mem;
use std::fmt;
use error::{InvalidUrl,
MissingPassword,
MissingUser,
PgConnectDbError,
PgConnectStreamError,
PgConnectBadResponse,
PgDbError,
PgInvalidColumn,
PgStreamDesynchronized,
PgStreamError,
PgWrongParamCount,
PostgresConnectError,
PostgresDbError,
PostgresError,
UnsupportedAuthentication,
PgWrongConnection,
PgWrongTransaction,
PgBadResponse};
use io::{MaybeSslStream, InternalStream};
use message::{AuthenticationCleartextPassword,
AuthenticationGSS,
AuthenticationKerberosV5,
AuthenticationMD5Password,
AuthenticationOk,
AuthenticationSCMCredential,
AuthenticationSSPI,
BackendKeyData,
BackendMessage,
BindComplete,
CommandComplete,
DataRow,
EmptyQueryResponse,
ErrorResponse,
NoData,
NoticeResponse,
NotificationResponse,
ParameterDescription,
ParameterStatus,
ParseComplete,
PortalSuspended,
ReadyForQuery,
RowDescription,
RowDescriptionEntry};
use message::{Bind,
CancelRequest,
Close,
Describe,
Execute,
FrontendMessage,
Parse,
PasswordMessage,
Query,
StartupMessage,
Sync,
Terminate};
use message::{WriteMessage, ReadMessage};
use types::{Oid, PostgresType, ToSql, FromSql, PgUnknownType, Binary};
#[macro_escape]
mod macros;
pub mod error;
mod io;
pub mod pool;
mod message;
pub mod types;
// Sentinel stored in every live connection; `canary()` exposes it so external
// code can detect a connection that has been finished or freed.
// NOTE(review): the consumer (presumably the pool module) is outside this
// chunk -- confirm.
static CANARY: u32 = 0xdeadbeef;
/// A typedef of the result returned by many methods.
pub type PostgresResult<T> = Result<T, PostgresError>;
/// Specifies the target server to connect to.
#[deriving(Clone)]
pub enum PostgresConnectTarget {
/// Connect via TCP to the specified host.
TargetTcp(String),
/// Connect via a Unix domain socket in the specified directory.
TargetUnix(Path)
}
/// Authentication information
#[deriving(Clone)]
pub struct PostgresUserInfo {
/// The username
pub user: String,
/// An optional password
pub password: Option<String>,
}
/// Information necessary to open a new connection to a Postgres server.
#[deriving(Clone)]
pub struct PostgresConnectParams {
/// The target server
pub target: PostgresConnectTarget,
/// The target port.
///
/// Defaults to 5432 if not specified.
pub port: Option<Port>,
/// The user to login as.
///
/// `PostgresConnection::connect` requires a user but `cancel_query` does
/// not.
pub user: Option<PostgresUserInfo>,
/// The database to connect to. Defaults the value of `user`.
pub database: Option<String>,
/// Runtime parameters to be passed to the Postgres backend.
pub options: Vec<(String, String)>,
}
/// A trait implemented by types that can be converted into a
/// `PostgresConnectParams`.
pub trait IntoConnectParams {
/// Converts the value of `self` into a `PostgresConnectParams`.
fn into_connect_params(self) -> Result<PostgresConnectParams, PostgresConnectError>;
}
// Identity conversion: a `PostgresConnectParams` is already in final form.
impl IntoConnectParams for PostgresConnectParams {
fn into_connect_params(self) -> Result<PostgresConnectParams, PostgresConnectError> {
Ok(self)
}
}
impl<'a> IntoConnectParams for &'a str {
/// Parses the string as a URL and delegates to the `Url` implementation.
fn into_connect_params(self) -> Result<PostgresConnectParams, PostgresConnectError> {
// The match is the function's tail expression, so the `return` that was
// here on the error arm was redundant.
match Url::parse(self) {
Ok(url) => url.into_connect_params(),
Err(err) => Err(InvalidUrl(err)),
}
}
}
impl IntoConnectParams for Url {
fn into_connect_params(self) -> Result<PostgresConnectParams, PostgresConnectError> {
let Url {
host,
port,
user,
path: url::Path { path, query: options, .. },
..
} = self;
// The host component may be a percent-encoded Unix socket directory;
// decode it before checking for a leading `/`.
let maybe_path = match url::decode_component(host.as_slice()) {
Ok(path) => path,
Err(err) => return Err(InvalidUrl(err)),
};
// A decoded host beginning with `/` is treated as a Unix socket path;
// anything else is a TCP host name.
let target = if maybe_path.as_slice().starts_with("/") {
TargetUnix(Path::new(maybe_path))
} else {
TargetTcp(host)
};
let user = match user {
Some(UserInfo { user, pass }) => Some(PostgresUserInfo { user: user, password: pass }),
None => None,
};
let database = if !path.is_empty() {
// path contains the leading /
let (_, path) = path.as_slice().slice_shift_char();
Some(path.to_string())
} else {
None
};
Ok(PostgresConnectParams {
target: target,
port: port,
user: user,
database: database,
options: options,
})
}
}
/// Trait for types that can handle Postgres notice messages
pub trait PostgresNoticeHandler {
/// Handle a Postgres notice message
fn handle(&mut self, notice: PostgresDbError);
}
/// A notice handler which logs at the `info` level.
///
/// This is the default handler used by a `PostgresConnection`.
pub struct DefaultNoticeHandler;
impl PostgresNoticeHandler for DefaultNoticeHandler {
fn handle(&mut self, notice: PostgresDbError) {
info!("{}: {}", notice.severity, notice.message);
}
}
/// An asynchronous notification
pub struct PostgresNotification {
/// The process ID of the notifying backend process
pub pid: u32,
/// The name of the channel that the notify has been raised on
pub channel: String,
/// The "payload" string passed from the notifying process
pub payload: String,
}
/// An iterator over asynchronous notifications
pub struct PostgresNotifications<'conn> {
conn: &'conn PostgresConnection
}
impl<'conn> Iterator<PostgresNotification> for PostgresNotifications<'conn> {
/// Returns the oldest pending notification or `None` if there are none.
///
/// ## Note
///
/// `next` may return `Some` notification after returning `None` if a new
/// notification was received.
fn next(&mut self) -> Option<PostgresNotification> {
// Notifications are queued by `read_message` as they arrive; this only
// drains the queue and never touches the network.
self.conn.conn.borrow_mut().notifications.pop_front()
}
}
/// Contains information necessary to cancel queries for a session
// Both fields are populated from the backend's BackendKeyData message during
// connection setup.
pub struct PostgresCancelData {
/// The process ID of the session
pub process_id: u32,
/// The secret key for the session
pub secret_key: u32,
}
/// Attempts to cancel an in-progress query.
///
/// The backend provides no information about whether a cancellation attempt
/// was successful or not. An error will only be returned if the driver was
/// unable to connect to the database.
///
/// A `PostgresCancelData` object can be created via
/// `PostgresConnection::cancel_data`. The object can cancel any query made on
/// that connection.
///
/// Only the host and port of the connection info are used. See
/// `PostgresConnection::connect` for details of the `params` argument.
///
/// ## Example
///
/// ```rust,no_run
/// # use postgres::{PostgresConnection, NoSsl};
/// # let url = "";
/// let conn = PostgresConnection::connect(url, &NoSsl).unwrap();
/// let cancel_data = conn.cancel_data();
/// spawn(proc() {
///     conn.execute("SOME EXPENSIVE QUERY", []).unwrap();
/// });
/// # let _ =
/// postgres::cancel_query(url, &NoSsl, cancel_data);
/// ```
pub fn cancel_query<T>(params: T, ssl: &SslMode, data: PostgresCancelData)
                       -> Result<(), PostgresConnectError> where T: IntoConnectParams {
    let params = try!(params.into_connect_params());
    // Fixed: `&params` had been corrupted to `¶ms` (HTML-entity mangling of
    // `&para`). Also use `try!` like the rest of the file instead of an
    // explicit match that only re-returned the same error.
    let mut socket = try!(io::initialize_stream(&params, ssl));
    // A cancel request is a one-shot message on a fresh connection; no
    // response is read, per the backend protocol.
    try_pg_conn!(socket.write_message(&CancelRequest {
        code: message::CANCEL_CODE,
        process_id: data.process_id,
        secret_key: data.secret_key
    }));
    try_pg_conn!(socket.flush());
    Ok(())
}
// The actual connection state, kept behind a RefCell in PostgresConnection so
// the public API can take `&self`.
struct InnerPostgresConnection {
stream: BufferedStream<MaybeSslStream<InternalStream>>,
// Counter used to generate unique prepared-statement names ("s0", "s1", ...).
next_stmt_id: uint,
notice_handler: Box<PostgresNoticeHandler+Send>,
// Queue of asynchronous notifications received while reading other messages.
notifications: RingBuf<PostgresNotification>,
cancel_data: PostgresCancelData,
// Cache of type names looked up from pg_type for unknown OIDs.
unknown_types: HashMap<Oid, String>,
// Set when the protocol stream is in an unknown state; all further
// operations fail fast once this is true.
desynchronized: bool,
finished: bool,
// Current transaction nesting depth; 0 means no transaction is active.
trans_depth: u32,
// Equals CANARY while the connection is live; zeroed by finish_inner.
canary: u32,
}
impl Drop for InnerPostgresConnection {
fn drop(&mut self) {
// Best-effort Terminate on drop; errors are deliberately ignored.
if !self.finished {
let _ = self.finish_inner();
}
}
}
impl InnerPostgresConnection {
fn connect<T>(params: T, ssl: &SslMode)
-> Result<InnerPostgresConnection, PostgresConnectError>
where T: IntoConnectParams {
let params = try!(params.into_connect_params());
let stream = try!(io::initialize_stream(¶ms, ssl));
let PostgresConnectParams {
user,
database,
mut options,
..
} = params;
let user = match user {
Some(user) => user,
None => return Err(MissingUser),
};
let mut conn = InnerPostgresConnection {
stream: BufferedStream::new(stream),
next_stmt_id: 0,
notice_handler: box DefaultNoticeHandler,
notifications: RingBuf::new(),
cancel_data: PostgresCancelData { process_id: 0, secret_key: 0 },
unknown_types: HashMap::new(),
desynchronized: false,
finished: false,
trans_depth: 0,
canary: CANARY,
};
options.push(("client_encoding".to_string(), "UTF8".to_string()));
// Postgres uses the value of TimeZone as the time zone for TIMESTAMP
// WITH TIME ZONE values. Timespec converts to GMT internally.
options.push(("TimeZone".to_string(), "GMT".to_string()));
// We have to clone here since we need the user again for auth
options.push(("user".to_string(), user.user.clone()));
match database {
Some(database) => options.push(("database".to_string(), database)),
None => {}
}
try_pg_conn!(conn.write_messages([StartupMessage {
version: message::PROTOCOL_VERSION,
parameters: options.as_slice()
}]));
try!(conn.handle_auth(user));
loop {
match try_pg_conn!(conn.read_message()) {
BackendKeyData { process_id, secret_key } => {
conn.cancel_data.process_id = process_id;
conn.cancel_data.secret_key = secret_key;
}
ReadyForQuery { .. } => break,
ErrorResponse { fields } =>
return Err(PgConnectDbError(PostgresDbError::new(fields))),
_ => return Err(PgConnectBadResponse),
}
}
Ok(conn)
}
// Writes a batch of frontend messages and flushes once at the end. Any I/O
// error marks the connection desynchronized (via try_desync!).
fn write_messages(&mut self, messages: &[FrontendMessage]) -> IoResult<()> {
debug_assert!(!self.desynchronized);
for message in messages.iter() {
try_desync!(self, self.stream.write_message(message));
}
Ok(try_desync!(self, self.stream.flush()))
}
// Reads the next backend message, transparently handling the asynchronous
// ones (notices, notifications, parameter status) so callers only ever see
// messages relevant to the current exchange.
fn read_message(&mut self) -> IoResult<BackendMessage> {
debug_assert!(!self.desynchronized);
loop {
match try_desync!(self, self.stream.read_message()) {
NoticeResponse { fields } => {
self.notice_handler.handle(PostgresDbError::new(fields))
}
NotificationResponse { pid, channel, payload } => {
self.notifications.push(PostgresNotification {
pid: pid,
channel: channel,
payload: payload
})
}
ParameterStatus { parameter, value } => {
debug!("Parameter {} = {}", parameter, value)
}
val => return Ok(val)
}
}
}
// Performs the authentication exchange started by the backend's first
// authentication message. Supports cleartext and MD5 password auth only.
fn handle_auth(&mut self, user: PostgresUserInfo) -> Result<(), PostgresConnectError> {
match try_pg_conn!(self.read_message()) {
AuthenticationOk => return Ok(()),
AuthenticationCleartextPassword => {
let pass = match user.password {
Some(pass) => pass,
None => return Err(MissingPassword)
};
try_pg_conn!(self.write_messages([PasswordMessage {
password: pass.as_slice(),
}]));
}
AuthenticationMD5Password { salt } => {
let pass = match user.password {
Some(pass) => pass,
None => return Err(MissingPassword)
};
// MD5 auth sends "md5" ++ hex(md5(hex(md5(password ++ user)) ++ salt)).
let hasher = Hasher::new(MD5);
hasher.update(pass.as_bytes());
hasher.update(user.user.as_bytes());
let output = hasher.final().as_slice().to_hex();
let hasher = Hasher::new(MD5);
hasher.update(output.as_bytes());
hasher.update(salt);
let output = format!("md5{}",
hasher.final().as_slice().to_hex());
try_pg_conn!(self.write_messages([PasswordMessage {
password: output.as_slice()
}]));
}
AuthenticationKerberosV5
| AuthenticationSCMCredential
| AuthenticationGSS
| AuthenticationSSPI => return Err(UnsupportedAuthentication),
ErrorResponse { fields } => return Err(PgConnectDbError(PostgresDbError::new(fields))),
_ => {
self.desynchronized = true;
return Err(PgConnectBadResponse);
}
}
// After sending a password, the backend answers with AuthenticationOk or
// an error.
match try_pg_conn!(self.read_message()) {
AuthenticationOk => Ok(()),
ErrorResponse { fields } => Err(PgConnectDbError(PostgresDbError::new(fields))),
_ => {
self.desynchronized = true;
return Err(PgConnectBadResponse);
}
}
}
// Swaps in a new notice handler, returning the previous one.
fn set_notice_handler(&mut self, handler: Box<PostgresNoticeHandler+Send>)
-> Box<PostgresNoticeHandler+Send> {
mem::replace(&mut self.notice_handler, handler)
}
// Prepares a statement: sends Parse/Describe/Sync, then reads the backend's
// ParseComplete, ParameterDescription and RowDescription (or NoData) replies,
// strictly in that order.
fn prepare<'a>(&mut self, query: &str, conn: &'a PostgresConnection)
-> PostgresResult<PostgresStatement<'a>> {
let stmt_name = format!("s{}", self.next_stmt_id);
self.next_stmt_id += 1;
try_pg!(self.write_messages([
Parse {
name: stmt_name.as_slice(),
query: query,
param_types: []
},
Describe {
variant: 'S' as u8,
name: stmt_name.as_slice(),
},
Sync]));
match try_pg!(self.read_message()) {
ParseComplete => {}
ErrorResponse { fields } => {
try!(self.wait_for_ready());
return Err(PgDbError(PostgresDbError::new(fields)));
}
_ => bad_response!(self),
}
let mut param_types: Vec<PostgresType> = match try_pg!(self.read_message()) {
ParameterDescription { types } => {
types.iter().map(|ty| PostgresType::from_oid(*ty)).collect()
}
_ => bad_response!(self),
};
let mut result_desc: Vec<ResultDescription> = match try_pg!(self.read_message()) {
RowDescription { descriptions } => {
descriptions.move_iter().map(|desc| {
let RowDescriptionEntry { name, type_oid, .. } = desc;
ResultDescription {
name: name,
ty: PostgresType::from_oid(type_oid)
}
}).collect()
}
NoData => vec![],
_ => bad_response!(self)
};
try!(self.wait_for_ready());
// now that the connection is ready again, get unknown type names
try!(self.set_type_names(param_types.mut_iter()));
try!(self.set_type_names(result_desc.mut_iter().map(|d| &mut d.ty)));
Ok(PostgresStatement {
conn: conn,
name: stmt_name,
param_types: param_types,
result_desc: result_desc,
next_portal_id: Cell::new(0),
finished: false,
})
}
// Fills in the human-readable name of every PgUnknownType in the iterator by
// querying (or consulting the cache of) pg_type.
fn set_type_names<'a, I: Iterator<&'a mut PostgresType>>(&mut self, mut it: I)
-> PostgresResult<()> {
for ty in it {
match *ty {
PgUnknownType { oid, ref mut name } => *name = try!(self.get_type_name(oid)),
_ => {}
}
}
Ok(())
}
// Looks up the type name for an OID, caching the result in unknown_types.
fn get_type_name(&mut self, oid: Oid) -> PostgresResult<String> {
match self.unknown_types.find(&oid) {
Some(name) => return Ok(name.clone()),
None => {}
}
let name = try!(self.quick_query(format!("SELECT typname FROM pg_type \
WHERE oid={}", oid).as_slice()))
.move_iter().next().unwrap().move_iter().next().unwrap().unwrap();
self.unknown_types.insert(oid, name.clone());
Ok(name)
}
fn is_desynchronized(&self) -> bool {
self.desynchronized
}
// Returns CANARY while the connection is live, 0 after finish_inner ran.
fn canary(&self) -> u32 {
self.canary
}
// Consumes messages until the backend signals ReadyForQuery.
fn wait_for_ready(&mut self) -> PostgresResult<()> {
match try_pg!(self.read_message()) {
ReadyForQuery { .. } => Ok(()),
_ => bad_response!(self)
}
}
// Runs a query through the simple-query protocol, returning all rows as
// optional strings. Used internally (BEGIN/COMMIT, pg_type lookups, etc).
fn quick_query(&mut self, query: &str) -> PostgresResult<Vec<Vec<Option<String>>>> {
check_desync!(self);
try_pg!(self.write_messages([Query { query: query }]));
let mut result = vec![];
loop {
match try_pg!(self.read_message()) {
ReadyForQuery { .. } => break,
DataRow { row } => {
result.push(row.move_iter().map(|opt| {
opt.map(|b| String::from_utf8(b).unwrap())
}).collect());
}
ErrorResponse { fields } => {
try!(self.wait_for_ready());
return Err(PgDbError(PostgresDbError::new(fields)));
}
_ => {}
}
}
Ok(result)
}
// Sends Terminate and zeroes the canary; no reply is expected.
fn finish_inner(&mut self) -> PostgresResult<()> {
check_desync!(self);
self.canary = 0;
try_pg!(self.write_messages([Terminate]));
Ok(())
}
}
/// A connection to a Postgres database.
// The RefCell allows the public API to take `&self` while still mutating the
// underlying connection state.
pub struct PostgresConnection {
conn: RefCell<InnerPostgresConnection>
}
impl PostgresConnection {
/// Creates a new connection to a Postgres database.
///
/// Most applications can use a URL string in the normal format:
///
/// ```notrust
/// postgresql://user[:password]@host[:port][/database][?param1=val1[[&param2=val2]...]]
/// ```
///
/// The password may be omitted if not required. The default Postgres port
/// (5432) is used if none is specified. The database name defaults to the
/// username if not specified.
///
/// To connect to the server via Unix sockets, `host` should be set to the
/// absolute path of the directory containing the socket file. Since `/` is
/// a reserved character in URLs, the path should be URL encoded. If the
/// path contains non-UTF 8 characters, a `PostgresConnectParams` struct
/// should be created manually and passed in. Note that Postgres does not
/// support SSL over Unix sockets.
///
/// ## Examples
///
/// ```rust,no_run
/// # use postgres::{PostgresConnection, NoSsl};
/// # let _ = || {
/// let url = "postgresql://postgres:hunter2@localhost:2994/foodb";
/// let conn = try!(PostgresConnection::connect(url, &NoSsl));
/// # Ok(()) };
/// ```
///
/// ```rust,no_run
/// # use postgres::{PostgresConnection, NoSsl};
/// # let _ = || {
/// let url = "postgresql://postgres@%2Frun%2Fpostgres";
/// let conn = try!(PostgresConnection::connect(url, &NoSsl));
/// # Ok(()) };
/// ```
///
/// ```rust,no_run
/// # use postgres::{PostgresConnection, PostgresUserInfo, PostgresConnectParams, NoSsl, TargetUnix};
/// # let _ = || {
/// # let some_crazy_path = Path::new("");
/// let params = PostgresConnectParams {
/// target: TargetUnix(some_crazy_path),
/// port: None,
/// user: Some(PostgresUserInfo {
/// user: "postgres".to_string(),
/// password: None
/// }),
/// database: None,
/// options: vec![],
/// };
/// let conn = try!(PostgresConnection::connect(params, &NoSsl));
/// # Ok(()) };
/// ```
pub fn connect<T>(params: T, ssl: &SslMode) -> Result<PostgresConnection, PostgresConnectError>
where T: IntoConnectParams {
InnerPostgresConnection::connect(params, ssl).map(|conn| {
PostgresConnection { conn: RefCell::new(conn) }
})
}
/// Sets the notice handler for the connection, returning the old handler.
pub fn set_notice_handler(&self, handler: Box<PostgresNoticeHandler+Send>)
-> Box<PostgresNoticeHandler+Send> {
self.conn.borrow_mut().set_notice_handler(handler)
}
/// Returns an iterator over asynchronous notification messages.
///
/// Use the `LISTEN` command to register this connection for notifications.
pub fn notifications<'a>(&'a self) -> PostgresNotifications<'a> {
PostgresNotifications {
conn: self
}
}
/// Creates a new prepared statement.
///
/// A statement may contain parameters, specified by `$n` where `n` is the
/// index of the parameter in the list provided at execution time,
/// 1-indexed.
///
/// The statement is associated with the connection that created it and may
/// not outlive that connection.
///
/// ## Example
///
/// ```rust,no_run
/// # use postgres::{PostgresConnection, NoSsl};
/// # let conn = PostgresConnection::connect("", &NoSsl).unwrap();
/// let maybe_stmt = conn.prepare("SELECT foo FROM bar WHERE baz = $1");
/// let stmt = match maybe_stmt {
/// Ok(stmt) => stmt,
/// Err(err) => fail!("Error preparing statement: {}", err)
/// };
/// ```
// Statements may not be prepared while a transaction is active on the
// connection itself; use the transaction's `prepare` method instead.
pub fn prepare<'a>(&'a self, query: &str) -> PostgresResult<PostgresStatement<'a>> {
let mut conn = self.conn.borrow_mut();
if conn.trans_depth != 0 {
return Err(PgWrongTransaction);
}
conn.prepare(query, self)
}
/// Begins a new transaction.
///
/// Returns a `PostgresTransaction` object which should be used instead of
/// the connection for the duration of the transaction. The transaction
/// is active until the `PostgresTransaction` object falls out of scope.
///
/// ## Note
/// A transaction will roll back by default. Use the `set_commit` method to
/// set the transaction to commit.
///
/// ## Example
///
/// ```rust,no_run
/// # use postgres::{PostgresConnection, NoSsl};
/// # fn foo() -> Result<(), postgres::error::PostgresError> {
/// # let conn = PostgresConnection::connect("", &NoSsl).unwrap();
/// let trans = try!(conn.transaction());
/// try!(trans.execute("UPDATE foo SET bar = 10", []));
/// // ...
///
/// trans.set_commit();
/// try!(trans.finish());
/// # Ok(())
/// # }
/// ```
pub fn transaction<'a>(&'a self) -> PostgresResult<PostgresTransaction<'a>> {
check_desync!(self);
// Nested transactions must be started from the PostgresTransaction.
if self.conn.borrow().trans_depth != 0 {
return Err(PgWrongTransaction);
}
try!(self.quick_query("BEGIN"));
self.conn.borrow_mut().trans_depth += 1;
Ok(PostgresTransaction {
conn: self,
commit: Cell::new(false),
depth: 1,
finished: false,
})
}
/// A convenience function for queries that are only run once.
///
/// If an error is returned, it could have come from either the preparation
/// or execution of the statement.
///
/// On success, returns the number of rows modified or 0 if not applicable.
pub fn execute(&self, query: &str, params: &[&ToSql]) -> PostgresResult<uint> {
self.prepare(query).and_then(|stmt| stmt.execute(params))
}
/// Execute a sequence of SQL statements.
///
/// Statements should be separated by `;` characters. If an error occurs,
/// execution of the sequence will stop at that point. This is intended for
/// execution of batches of non-dynamic statements - for example, creation
/// of a schema for a fresh database.
///
/// ## Warning
///
/// Prepared statements should be used for any SQL statement which contains
/// user-specified data, as it provides functionality to safely embed that
/// data in the statment. Do not form statements via string concatenation
/// and feed them into this method.
///
/// ## Example
///
/// ```rust,no_run
/// # use postgres::{PostgresConnection, PostgresResult};
/// fn init_db(conn: &PostgresConnection) -> PostgresResult<()> {
/// conn.batch_execute("
/// CREATE TABLE person (
/// id SERIAL PRIMARY KEY,
/// name NOT NULL
/// );
///
/// CREATE TABLE purchase (
/// id SERIAL PRIMARY KEY,
/// person INT NOT NULL REFERENCES person (id),
/// time TIMESTAMPTZ NOT NULL,
/// );
///
/// CREATE INDEX ON purchase (time);
/// ")
/// }
/// ```
pub fn batch_execute(&self, query: &str) -> PostgresResult<()> {
let mut conn = self.conn.borrow_mut();
if conn.trans_depth != 0 {
return Err(PgWrongTransaction);
}
conn.quick_query(query).map(|_| ())
}
/// Returns information used to cancel pending queries.
///
/// Used with the `cancel_query` function. The object returned can be used
/// to cancel any query executed by the connection it was created from.
pub fn cancel_data(&self) -> PostgresCancelData {
self.conn.borrow().cancel_data
}
/// Returns whether or not the stream has been desynchronized due to an
/// error in the communication channel with the server.
///
/// If this has occurred, all further queries will immediately return an
/// error.
pub fn is_desynchronized(&self) -> bool {
self.conn.borrow().is_desynchronized()
}
/// Consumes the connection, closing it.
///
/// Functionally equivalent to the `Drop` implementation for
/// `PostgresConnection` except that it returns any error encountered to
/// the caller.
pub fn finish(self) -> PostgresResult<()> {
let mut conn = self.conn.borrow_mut();
conn.finished = true;
conn.finish_inner()
}
// The following private methods simply delegate to the inner connection,
// handling the RefCell borrow at this level.
fn canary(&self) -> u32 {
self.conn.borrow().canary()
}
fn quick_query(&self, query: &str) -> PostgresResult<Vec<Vec<Option<String>>>> {
self.conn.borrow_mut().quick_query(query)
}
fn wait_for_ready(&self) -> PostgresResult<()> {
self.conn.borrow_mut().wait_for_ready()
}
fn read_message(&self) -> IoResult<BackendMessage> {
self.conn.borrow_mut().read_message()
}
fn write_messages(&self, messages: &[FrontendMessage]) -> IoResult<()> {
self.conn.borrow_mut().write_messages(messages)
}
}
/// Specifies the SSL support requested for a new connection
pub enum SslMode {
/// The connection will not use SSL
NoSsl,
/// The connection will use SSL if the backend supports it
PreferSsl(SslContext),
/// The connection must use SSL
RequireSsl(SslContext)
}
/// Represents a transaction on a database connection.
///
/// The transaction will roll back by default.
pub struct PostgresTransaction<'conn> {
conn: &'conn PostgresConnection,
// Whether to COMMIT/RELEASE (true) or ROLLBACK (false) on finish.
commit: Cell<bool>,
// Nesting depth of this transaction; the outermost is 1.
depth: u32,
finished: bool,
}
#[unsafe_destructor]
impl<'conn> Drop for PostgresTransaction<'conn> {
fn drop(&mut self) {
// Best-effort rollback/commit on drop; errors are deliberately ignored.
if !self.finished {
let _ = self.finish_inner();
}
}
}
impl<'conn> PostgresTransaction<'conn> {
// Issues the statement that ends this transaction level. Nested levels are
// implemented with a single savepoint named "sp"; the outermost level uses
// real COMMIT/ROLLBACK.
fn finish_inner(&mut self) -> PostgresResult<()> {
debug_assert!(self.depth == self.conn.conn.borrow().trans_depth);
let query = match (self.commit.get(), self.depth != 1) {
(false, true) => "ROLLBACK TO sp",
(false, false) => "ROLLBACK",
(true, true) => "RELEASE sp",
(true, false) => "COMMIT",
};
self.conn.conn.borrow_mut().trans_depth -= 1;
self.conn.quick_query(query).map(|_| ())
}
/// Like `PostgresConnection::prepare`.
// Fails with PgWrongTransaction when called on anything but the innermost
// active transaction.
pub fn prepare<'a>(&'a self, query: &str) -> PostgresResult<PostgresStatement<'a>> {
if self.conn.conn.borrow().trans_depth != self.depth {
return Err(PgWrongTransaction);
}
self.conn.conn.borrow_mut().prepare(query, self.conn)
}
/// Like `PostgresConnection::execute`.
pub fn execute(&self, query: &str, params: &[&ToSql]) -> PostgresResult<uint> {
self.prepare(query).and_then(|s| s.execute(params))
}
/// Like `PostgresConnection::batch_execute`.
pub fn batch_execute(&self, query: &str) -> PostgresResult<()> {
let mut conn = self.conn.conn.borrow_mut();
if conn.trans_depth != self.depth {
return Err(PgWrongTransaction);
}
conn.quick_query(query).map(|_| ())
}
/// Like `PostgresConnection::transaction`.
// Nested transactions are emulated with a savepoint named "sp".
pub fn transaction<'a>(&'a self) -> PostgresResult<PostgresTransaction<'a>> {
check_desync!(self.conn);
if self.conn.conn.borrow().trans_depth != self.depth {
return Err(PgWrongTransaction);
}
try!(self.conn.quick_query("SAVEPOINT sp"));
self.conn.conn.borrow_mut().trans_depth += 1;
Ok(PostgresTransaction {
conn: self.conn,
commit: Cell::new(false),
depth: self.depth + 1,
finished: false,
})
}
/// Executes a prepared statement, returning a lazily loaded iterator over
/// the resulting rows.
///
/// No more than `row_limit` rows will be stored in memory at a time. Rows
/// will be pulled from the database in batches of `row_limit` as needed.
/// If `row_limit` is less than or equal to 0, `lazy_query` is equivalent
/// to `query`.
pub fn lazy_query<'trans, 'stmt>(&'trans self,
stmt: &'stmt PostgresStatement,
params: &[&ToSql],
row_limit: i32)
-> PostgresResult<PostgresLazyRows<'trans, 'stmt>> {
// The statement must belong to this transaction's connection; compare
// the connection pointers directly.
if self.conn as *const _ != stmt.conn as *const _ {
return Err(PgWrongConnection);
}
check_desync!(self.conn);
stmt.lazy_query(row_limit, params).map(|result| {
PostgresLazyRows {
_trans: self,
result: result
}
})
}
/// Determines if the transaction is currently set to commit or roll back.
pub fn will_commit(&self) -> bool {
self.commit.get()
}
/// Sets the transaction to commit at its completion.
pub fn set_commit(&self) {
self.commit.set(true);
}
/// Sets the transaction to roll back at its completion.
pub fn set_rollback(&self) {
self.commit.set(false);
}
/// A convenience method which consumes and commits a transaction.
pub fn commit(self) -> PostgresResult<()> {
self.set_commit();
self.finish()
}
/// Consumes the transaction, commiting or rolling it back as appropriate.
///
/// Functionally equivalent to the `Drop` implementation of
/// `PostgresTransaction` except that it returns any error to the caller.
pub fn finish(mut self) -> PostgresResult<()> {
self.finished = true;
self.finish_inner()
}
}
/// A prepared statement
pub struct PostgresStatement<'conn> {
conn: &'conn PostgresConnection,
// Backend-side statement name, generated by InnerPostgresConnection::prepare.
name: String,
param_types: Vec<PostgresType>,
result_desc: Vec<ResultDescription>,
// Counter used to generate unique portal names for lazy queries.
next_portal_id: Cell<uint>,
finished: bool,
}
#[unsafe_destructor]
impl<'conn> Drop for PostgresStatement<'conn> {
fn drop(&mut self) {
// Best-effort close of the backend statement; errors are ignored.
if !self.finished {
let _ = self.finish_inner();
}
}
}
impl<'conn> PostgresStatement<'conn> {
// Closes the backend-side prepared statement ('S' variant of Close) and
// drains messages until ReadyForQuery.
fn finish_inner(&mut self) -> PostgresResult<()> {
check_desync!(self.conn);
try_pg!(self.conn.write_messages([
Close {
variant: 'S' as u8,
name: self.name.as_slice()
},
Sync]));
loop {
match try_pg!(self.conn.read_message()) {
ReadyForQuery { .. } => break,
ErrorResponse { fields } => {
try!(self.conn.wait_for_ready());
return Err(PgDbError(PostgresDbError::new(fields)));
}
_ => {}
}
}
Ok(())
}
// Binds the parameters to a portal and starts execution; shared by both
// `execute` and `lazy_query`. All result columns are requested in binary
// format.
fn inner_execute(&self, portal_name: &str, row_limit: i32, params: &[&ToSql])
-> PostgresResult<()> {
if self.param_types.len() != params.len() {
return Err(PgWrongParamCount {
expected: self.param_types.len(),
actual: params.len(),
});
}
let mut formats = vec![];
let mut values = vec![];
for (param, ty) in params.iter().zip(self.param_types.iter()) {
let (format, value) = try!(param.to_sql(ty));
formats.push(format as i16);
values.push(value);
};
let result_formats = Vec::from_elem(self.result_desc.len(), Binary as i16);
try_pg!(self.conn.write_messages([
Bind {
portal: portal_name,
statement: self.name.as_slice(),
formats: formats.as_slice(),
values: values.as_slice(),
result_formats: result_formats.as_slice()
},
Execute {
portal: portal_name,
max_rows: row_limit
},
Sync]));
match try_pg!(self.conn.read_message()) {
BindComplete => Ok(()),
ErrorResponse { fields } => {
try!(self.conn.wait_for_ready());
Err(PgDbError(PostgresDbError::new(fields)))
}
_ => {
self.conn.conn.borrow_mut().desynchronized = true;
return Err(PgBadResponse);
}
}
}
fn lazy_query<'a>(&'a self, row_limit: i32, params: &[&ToSql])
-> PostgresResult<PostgresRows<'a>> {
let id = self.next_portal_id.get();
self.next_portal_id.set(id + 1);
let portal_name = format!("{}p{}", self.name, id);
try!(self.inner_execute(portal_name.as_slice(), row_limit, params));
let mut result = PostgresRows {
stmt: self,
name: portal_name,
data: RingBuf::new(),
row_limit: row_limit,
more_rows: true,
finished: false,
};
try!(result.read_rows())
Ok(result)
}
/// Returns a slice containing the expected parameter types.
pub fn param_types(&self) -> &[PostgresType] {
self.param_types.as_slice()
}
/// Returns a slice describing the columns of the result of the query.
pub fn result_descriptions(&self) -> &[ResultDescription] {
self.result_desc.as_slice()
}
/// Executes the prepared statement, returning the number of rows modified.
///
/// If the statement does not modify any rows (e.g. SELECT), 0 is returned.
///
/// ## Example
///
/// ```rust,no_run
/// # use postgres::{PostgresConnection, NoSsl};
/// # let conn = PostgresConnection::connect("", &NoSsl).unwrap();
/// # let bar = 1i32;
/// # let baz = true;
/// let stmt = conn.prepare("UPDATE foo SET bar = $1 WHERE baz = $2").unwrap();
/// match stmt.execute([&bar, &baz]) {
/// Ok(count) => println!("{} row(s) updated", count),
/// Err(err) => println!("Error executing query: {}", err)
/// }
/// ```
pub fn execute(&self, params: &[&ToSql]) -> PostgresResult<uint> {
check_desync!(self.conn);
try!(self.inner_execute("", 0, params));
let num;
loop {
match try_pg!(self.conn.read_message()) {
DataRow { .. } => {}
ErrorResponse { fields } => {
try!(self.conn.wait_for_ready());
return Err(PgDbError(PostgresDbError::new(fields)));
}
CommandComplete { tag } => {
// The row count is the last word of the command tag, e.g.
// "UPDATE 3"; tags without a count parse to 0.
let s = tag.as_slice().split(' ').last().unwrap();
num = FromStr::from_str(s).unwrap_or(0);
break;
}
EmptyQueryResponse => {
num = 0;
break;
}
_ => {
self.conn.conn.borrow_mut().desynchronized = true;
return Err(PgBadResponse);
}
}
}
try!(self.conn.wait_for_ready());
Ok(num)
}
/// Executes the prepared statement, returning an iterator over the
/// resulting rows.
///
/// ## Example
///
/// ```rust,no_run
/// # use postgres::{PostgresConnection, NoSsl};
/// # let conn = PostgresConnection::connect("", &NoSsl).unwrap();
/// let stmt = conn.prepare("SELECT foo FROM bar WHERE baz = $1").unwrap();
/// # let baz = true;
/// let mut rows = match stmt.query([&baz]) {
/// Ok(rows) => rows,
/// Err(err) => fail!("Error running query: {}", err)
/// };
/// for row in rows {
/// let foo: i32 = row.get("foo");
/// println!("foo: {}", foo);
/// }
/// ```
pub fn query<'a>(&'a self, params: &[&ToSql]) -> PostgresResult<PostgresRows<'a>> {
check_desync!(self.conn);
// row_limit of 0 fetches every row eagerly.
self.lazy_query(0, params)
}
/// Consumes the statement, clearing it from the Postgres session.
///
/// Functionally identical to the `Drop` implementation of the
/// `PostgresStatement` except that it returns any error to the caller.
pub fn finish(mut self) -> PostgresResult<()> {
self.finished = true;
self.finish_inner()
}
}
/// Information about a column of the result of a query.
#[deriving(PartialEq, Eq)]
pub struct ResultDescription {
/// The name of the column
pub name: String,
/// The type of the data in the column
pub ty: PostgresType
}
/// An iterator over the resulting rows of a query.
pub struct PostgresRows<'stmt> {
stmt: &'stmt PostgresStatement<'stmt>,
// Backend-side portal name this result set reads from.
name: String,
// Buffered rows not yet yielded; refilled in batches of row_limit.
data: RingBuf<Vec<Option<Vec<u8>>>>,
row_limit: i32,
// True while the backend may still have more rows for this portal.
more_rows: bool,
finished: bool,
}
#[unsafe_destructor]
impl<'stmt> Drop for PostgresRows<'stmt> {
fn drop(&mut self) {
// Best-effort close of the backend portal; errors are ignored.
if !self.finished {
let _ = self.finish_inner();
}
}
}
impl<'stmt> PostgresRows<'stmt> {
// Closes the backend-side portal ('P' variant of Close) and drains messages
// until ReadyForQuery.
fn finish_inner(&mut self) -> PostgresResult<()> {
check_desync!(self.stmt.conn);
try_pg!(self.stmt.conn.write_messages([
Close {
variant: 'P' as u8,
name: self.name.as_slice()
},
Sync]));
loop {
match try_pg!(self.stmt.conn.read_message()) {
ReadyForQuery { .. } => break,
ErrorResponse { fields } => {
try!(self.stmt.conn.wait_for_ready());
return Err(PgDbError(PostgresDbError::new(fields)));
}
_ => {}
}
}
Ok(())
}
// Buffers one batch of rows. PortalSuspended means the row_limit was hit
// and more rows remain; CommandComplete/EmptyQueryResponse mean the portal
// is exhausted.
fn read_rows(&mut self) -> PostgresResult<()> {
loop {
match try_pg!(self.stmt.conn.read_message()) {
EmptyQueryResponse | CommandComplete { .. } => {
self.more_rows = false;
break;
},
PortalSuspended => {
self.more_rows = true;
break;
},
DataRow { row } => self.data.push(row),
ErrorResponse { fields } => {
try!(self.stmt.conn.wait_for_ready());
return Err(PgDbError(PostgresDbError::new(fields)));
}
_ => {
self.stmt.conn.conn.borrow_mut().desynchronized = true;
return Err(PgBadResponse);
}
}
}
self.stmt.conn.wait_for_ready()
}
// Asks the backend for the next batch from the existing portal.
fn execute(&mut self) -> PostgresResult<()> {
try_pg!(self.stmt.conn.write_messages([
Execute {
portal: self.name.as_slice(),
max_rows: self.row_limit
},
Sync]));
self.read_rows()
}
/// Consumes the `PostgresRows`, cleaning up associated state.
///
/// Functionally identical to the `Drop` implementation on `PostgresRows`
/// except that it returns any error to the caller.
pub fn finish(mut self) -> PostgresResult<()> {
self.finished = true;
self.finish_inner()
}
// Yields the next buffered row, fetching another batch from the backend
// first if the buffer is empty and the portal is not exhausted.
fn try_next(&mut self) -> Option<PostgresResult<PostgresRow<'stmt>>> {
if self.data.is_empty() && self.more_rows {
match self.execute() {
Ok(()) => {}
Err(err) => return Some(Err(err))
}
}
self.data.pop_front().map(|row| {
Ok(PostgresRow {
stmt: self.stmt,
data: row
})
})
}
}
impl<'stmt> Iterator<PostgresRow<'stmt>> for PostgresRows<'stmt> {
#[inline]
fn next(&mut self) -> Option<PostgresRow<'stmt>> {
// we'll never hit the network on a non-lazy result
// NOTE(review): for lazy results obtained via `lazy_query` this unwrap
// can fail on a network error -- confirm callers use PostgresLazyRows
// for that case.
self.try_next().map(|r| r.unwrap())
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
// The buffered rows are a firm lower bound; the upper bound is unknown
// while the backend may still produce more rows.
let lower = self.data.len();
let upper = if self.more_rows {
None
} else {
Some(lower)
};
(lower, upper)
}
}
/// A single result row of a query.
pub struct PostgresRow<'stmt> {
    // The statement that produced this row; used for column metadata lookup.
    stmt: &'stmt PostgresStatement<'stmt>,
    // Raw column values as sent by the backend; `None` encodes SQL NULL.
    data: Vec<Option<Vec<u8>>>
}
impl<'stmt> PostgresRow<'stmt> {
    /// Retrieves the contents of a field of the row.
    ///
    /// A field can be accessed by the name or index of its column, though
    /// access by index is more efficient. Rows are 0-indexed.
    ///
    /// Returns an `Error` value if the index does not reference a column or
    /// the return type is not compatible with the Postgres type.
    pub fn get_opt<I, T>(&self, idx: I) -> PostgresResult<T> where I: RowIndex, T: FromSql {
        // Resolve the caller-supplied index (numeric or column name).
        let idx = match idx.idx(self.stmt) {
            Some(idx) => idx,
            None => return Err(PgInvalidColumn)
        };
        // Hand the raw (possibly NULL) bytes together with the column's
        // declared type to the FromSql conversion.
        FromSql::from_sql(&self.stmt.result_desc[idx].ty, &self.data[idx])
    }
    /// Retrieves the contents of a field of the row.
    ///
    /// A field can be accessed by the name or index of its column, though
    /// access by index is more efficient. Rows are 0-indexed.
    ///
    /// ## Failure
    ///
    /// Fails if the index does not reference a column or the return type is
    /// not compatible with the Postgres type.
    ///
    /// ## Example
    ///
    /// ```rust,no_run
    /// # use postgres::{PostgresConnection, NoSsl};
    /// # let conn = PostgresConnection::connect("", &NoSsl).unwrap();
    /// # let stmt = conn.prepare("").unwrap();
    /// # let mut result = stmt.query([]).unwrap();
    /// # let row = result.next().unwrap();
    /// let foo: i32 = row.get(0u);
    /// let bar: String = row.get("bar");
    /// ```
    pub fn get<I, T>(&self, idx: I) -> T where I: RowIndex + fmt::Show + Clone, T: FromSql {
        // The index is cloned so it is still available for the task-failure
        // message if the lookup or conversion goes wrong.
        match self.get_opt(idx.clone()) {
            Ok(ok) => ok,
            Err(err) => fail!("error retrieving column {}: {}", idx, err)
        }
    }
}
impl<'stmt> Collection for PostgresRow<'stmt> {
    /// Returns the number of columns in the row.
    #[inline]
    fn len(&self) -> uint {
        self.data.len()
    }
}
/// A trait implemented by types that can index into columns of a row.
pub trait RowIndex {
    /// Returns the index of the appropriate column, or `None` if no such
    /// column exists.
    ///
    /// The statement provides the column metadata used for the lookup.
    fn idx(&self, stmt: &PostgresStatement) -> Option<uint>;
}
impl RowIndex for uint {
    /// Validates a numeric column index against the statement's column
    /// count, returning it unchanged when in range.
    #[inline]
    fn idx(&self, stmt: &PostgresStatement) -> Option<uint> {
        // Valid indices are 0..len-1, so an index *equal* to the column
        // count is also out of range. The previous `>` comparison let
        // `idx == len` through, causing an out-of-bounds access when
        // `PostgresRow::get_opt` later indexed `result_desc`/`data`.
        if *self >= stmt.result_desc.len() {
            None
        } else {
            Some(*self)
        }
    }
}
impl<'a> RowIndex for &'a str {
    #[inline]
    fn idx(&self, stmt: &PostgresStatement) -> Option<uint> {
        // Linear scan of the result descriptions: the position of the first
        // column whose name matches exactly (case sensitive) is returned.
        stmt.result_descriptions().iter().position(|d| d.name.as_slice() == *self)
    }
}
/// A lazily-loaded iterator over the resulting rows of a query
pub struct PostgresLazyRows<'trans, 'stmt> {
    result: PostgresRows<'stmt>,
    // Held only to tie this value to the transaction's lifetime: a lazy
    // result must not outlive the transaction that owns its portal.
    _trans: &'trans PostgresTransaction<'trans>,
}
impl<'trans, 'stmt> PostgresLazyRows<'trans, 'stmt> {
    /// Like `PostgresRows::finish`.
    #[inline]
    pub fn finish(self) -> PostgresResult<()> {
        self.result.finish()
    }
}
impl<'trans, 'stmt> Iterator<PostgresResult<PostgresRow<'stmt>>>
        for PostgresLazyRows<'trans, 'stmt> {
    // Unlike `PostgresRows`, each item is a `PostgresResult` because
    // fetching the next batch may hit the network and fail.
    #[inline]
    fn next(&mut self) -> Option<PostgresResult<PostgresRow<'stmt>>> {
        self.result.try_next()
    }
    #[inline]
    fn size_hint(&self) -> (uint, Option<uint>) {
        self.result.size_hint()
    }
}
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A lightweight logging facade.
//!
//! The `log` crate provides a single logging API that abstracts over the
//! actual logging implementation. Libraries can use the logging API provided
//! by this crate, and the consumer of those libraries can choose the logging
//! framework that is most suitable for its use case.
//!
//! If no logging implementation is selected, the facade falls back to a "noop"
//! implementation that ignores all log messages. The overhead in this case
//! is very small - just an integer load, comparison and jump.
//!
//! A log request consists of a _target_, a _level_, and a _body_. A target is a
//! string which defaults to the module path of the location of the log request,
//! though that default may be overridden. Logger implementations typically use
//! the target to filter requests based on some user configuration.
//!
//! # Use
//!
//! ## In libraries
//!
//! Libraries should link only to the `log` crate, and use the provided
//! macros to log whatever information will be useful to downstream consumers.
//!
//! ### Examples
//!
//! ```rust
//! # #![allow(unstable)]
//! #[macro_use]
//! extern crate log;
//!
//! # #[derive(Debug)] pub struct Yak(String);
//! # impl Yak { fn shave(&self, _: u32) {} }
//! # fn find_a_razor() -> Result<u32, u32> { Ok(1) }
//! pub fn shave_the_yak(yak: &Yak) {
//! info!(target: "yak_events", "Commencing yak shaving for {:?}", yak);
//!
//! loop {
//! match find_a_razor() {
//! Ok(razor) => {
//! info!("Razor located: {}", razor);
//! yak.shave(razor);
//! break;
//! }
//! Err(err) => {
//! warn!("Unable to locate a razor: {}, retrying", err);
//! }
//! }
//! }
//! }
//! # fn main() {}
//! ```
//!
//! ## In executables
//!
//! Executables should choose a logging framework and initialize it early in the
//! runtime of the program. Logging frameworks will typically include a
//! function to do this. Any log messages generated before the framework is
//! initialized will be ignored.
//!
//! The executable itself may use the `log` crate to log as well.
//!
//! ### Warning
//!
//! The logging system may only be initialized once.
//!
//! ### Examples
//!
//! ```rust,ignore
//! #[macro_use]
//! extern crate log;
//! extern crate env_logger;
//!
//! fn main() {
//! // Select env_logger, one possible logger implementation
//! // (see https://doc.rust-lang.org/log/env_logger/index.html)
//! env_logger::init().unwrap();
//!
//! info!("starting up");
//! error!("error: {}", 404);
//!
//! // ...
//! }
//! ```
//!
//! # Logger implementations
//!
//! Loggers implement the [`Log`] trait. Here's a very basic example that simply
//! logs all messages at the [`Error`][level_link], [`Warn`][level_link] or
//! [`Info`][level_link] levels to stdout:
//!
//! ```rust
//! extern crate log;
//!
//! use log::{Record, Level, Metadata};
//!
//! struct SimpleLogger;
//!
//! impl log::Log for SimpleLogger {
//! fn enabled(&self, metadata: &Metadata) -> bool {
//! metadata.level() <= Level::Info
//! }
//!
//! fn log(&self, record: &Record) {
//! if self.enabled(record.metadata()) {
//! println!("{} - {}", record.level(), record.args());
//! }
//! }
//! }
//!
//! # fn main() {}
//! ```
//!
//! Loggers are installed by calling the [`set_logger`] function. It takes a
//! closure which is provided a [`MaxLevelFilter`] token and returns a
//! [`Log`] trait object. The [`MaxLevelFilter`] token controls the global
//! maximum log level. The logging facade uses this as an optimization to
//! improve performance of log messages at levels that are disabled. In the
//! case of our example logger, we'll want to set the maximum log level to
//! [`Info`][level_link], since we ignore any [`Debug`][level_link] or
//! [`Trace`][level_link] level log messages. A logging framework should
//! provide a function that wraps a call to [`set_logger`], handling
//! initialization of the logger:
//!
//! ```rust
//! # extern crate log;
//! # use log::{Level, LevelFilter, SetLoggerError, Metadata};
//! # struct SimpleLogger;
//! # impl log::Log for SimpleLogger {
//! # fn enabled(&self, _: &Metadata) -> bool { false }
//! # fn log(&self, _: &log::Record) {}
//! # }
//! # fn main() {}
//! # #[cfg(feature = "use_std")]
//! pub fn init() -> Result<(), SetLoggerError> {
//! log::set_logger(|max_level| {
//! max_level.set(LevelFilter::Info);
//! Box::new(SimpleLogger)
//! })
//! }
//! ```
//!
//! # Use with `no_std`
//!
//! To use the `log` crate without depending on `libstd`, you need to specify
//! `default-features = false` when specifying the dependency in `Cargo.toml`.
//! This makes no difference to libraries using `log` since the logging API
//! remains the same. However executables will need to use the [`set_logger_raw`]
//! function to initialize a logger and the [`shutdown_logger_raw`] function to
//! shut down the global logger before exiting:
//!
//! ```rust
//! # extern crate log;
//! # use log::{Level, LevelFilter, SetLoggerError, ShutdownLoggerError,
//! # Metadata};
//! # struct SimpleLogger;
//! # impl log::Log for SimpleLogger {
//! # fn enabled(&self, _: &Metadata) -> bool { false }
//! # fn log(&self, _: &log::Record) {}
//! # }
//! # impl SimpleLogger {
//! # fn flush(&self) {}
//! # }
//! # fn main() {}
//! pub fn init() -> Result<(), SetLoggerError> {
//! unsafe {
//! log::set_logger_raw(|max_level| {
//! static LOGGER: SimpleLogger = SimpleLogger;
//! max_level.set(LevelFilter::Info);
//! &SimpleLogger
//! })
//! }
//! }
//! pub fn shutdown() -> Result<(), ShutdownLoggerError> {
//! log::shutdown_logger_raw().map(|logger| {
//! let logger = unsafe { &*(logger as *const SimpleLogger) };
//! logger.flush();
//! })
//! }
//! ```
//!
//! # Features
//!
//! Optionally, when defining a `Cargo.toml` file, additional parameters can be passed that affect
//! the logger depending on the target of the build. Effectively, `max_level_*` and
//! `release_max_level_*` directives can be added as features of the log dependency. When
//! these are set, they override the behavior of the logging levels above the declared maximum
//! preventing anything higher from logging.
//!
//! ```toml
//! [dependencies.log]
//! version = "^0.3.7"
//! features = ["max_level_debug", "release_max_level_warn"]
//! ```
//!
//! [`Log`]: trait.Log.html
//! [level_link]: enum.Level.html
//! [`set_logger`]: fn.set_logger.html
//! [`MaxLevelFilter`]: struct.MaxLevelFilter.html
//! [`set_logger_raw`]: fn.set_logger_raw.html
//! [`shutdown_logger_raw`]: fn.shutdown_logger_raw.html
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://www.rust-lang.org/favicon.ico",
html_root_url = "https://docs.rs/log/0.3.8")]
#![warn(missing_docs)]
#![deny(missing_debug_implementations)]
#![cfg_attr(feature = "nightly", feature(panic_handler))]
#![cfg_attr(not(feature = "use_std"), no_std)]
// When compiled for the rustc compiler itself we want to make sure that this is
// an unstable crate
#![cfg_attr(rustbuild, feature(staged_api, rustc_private))]
#![cfg_attr(rustbuild, unstable(feature = "rustc_private", issue = "27812"))]
#[cfg(not(feature = "use_std"))]
extern crate core as std;
use std::cmp;
#[cfg(feature = "use_std")]
use std::error;
use std::fmt;
use std::mem;
use std::ops::Deref;
use std::str::FromStr;
use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering};
#[macro_use]
mod macros;
mod serde;
// The setup here is a bit weird to make shutdown_logger_raw work.
//
// There are four different states that we care about: the logger's
// uninitialized, the logger's initializing (set_logger's been called but
// LOGGER hasn't actually been set yet), the logger's active, or the logger is
// shut down after calling shutdown_logger_raw.
//
// The LOGGER static holds a pointer to the global logger. It is protected by
// the STATE static which determines whether LOGGER has been initialized yet.
//
// The shutdown_logger_raw routine needs to make sure that no threads are
// actively logging before it returns. The number of actively logging threads is
// tracked in the REFCOUNT static. The routine first sets STATE back to
// INITIALIZING. All logging calls past that point will immediately return
// without accessing the logger. At that point, the at_exit routine just waits
// for the refcount to reach 0 before deallocating the logger. Note that the
// refcount does not necessarily monotonically decrease at this point, as new
// log calls still increment and decrement it, but the interval in between is
// small enough that the wait is really just for the active log calls to finish.
// The global logger pointer. Only written while STATE == INITIALIZING and
// only read (via logger()) while STATE == INITIALIZED; see the state machine
// described in the comment above.
static mut LOGGER: *const Log = &NopLogger;
// Guards LOGGER: UNINITIALIZED -> INITIALIZING -> INITIALIZED (and back to
// INITIALIZING on shutdown).
static STATE: AtomicUsize = ATOMIC_USIZE_INIT;
// Number of threads currently holding a reference to LOGGER (see LoggerGuard).
static REFCOUNT: AtomicUsize = ATOMIC_USIZE_INIT;
const UNINITIALIZED: usize = 0;
const INITIALIZING: usize = 1;
const INITIALIZED: usize = 2;
// Holds a LevelFilter discriminant; read back via transmute in max_level().
static MAX_LOG_LEVEL_FILTER: AtomicUsize = ATOMIC_USIZE_INIT;
// Indexed by the Level/LevelFilter discriminants; index 0 is the Off filter.
static LOG_LEVEL_NAMES: [&'static str; 6] = ["OFF", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"];
static SET_LOGGER_ERROR: &'static str = "attempted to set a logger after the logging system \
                                         was already initialized";
static SHUTDOWN_LOGGER_ERROR: &'static str = "attempted to shut down the logger without an active logger";
static LEVEL_PARSE_ERROR: &'static str = "attempted to convert a string that doesn't match an existing log level";
/// An enum representing the available verbosity levels of the logging framework.
///
/// Typical usage includes: checking if a certain `Level` is enabled with
/// [`log_enabled!`](macro.log_enabled.html), specifying the `Level` of
/// [`log!`](macro.log.html), and comparing a `Level` directly to a
/// [`LevelFilter`](enum.LevelFilter.html).
#[repr(usize)]
#[derive(Copy, Eq, Debug, Hash)]
pub enum Level {
    /// The "error" level.
    ///
    /// Designates very serious errors.
    Error = 1, // This way these line up with the discriminants for LevelFilter below
    /// The "warn" level.
    ///
    /// Designates hazardous situations.
    Warn,
    /// The "info" level.
    ///
    /// Designates useful information.
    Info,
    /// The "debug" level.
    ///
    /// Designates lower priority information.
    Debug,
    /// The "trace" level.
    ///
    /// Designates very low priority, often extremely verbose, information.
    Trace,
}
// The comparison impls below are written by hand (rather than derived) and
// compare the usize discriminants, which also allows the cross-type
// comparisons between Level and LevelFilter.
impl Clone for Level {
    #[inline]
    fn clone(&self) -> Level {
        *self
    }
}
impl PartialEq for Level {
    #[inline]
    fn eq(&self, other: &Level) -> bool {
        *self as usize == *other as usize
    }
}
impl PartialEq<LevelFilter> for Level {
    // Valid because the discriminants of the two enums line up.
    #[inline]
    fn eq(&self, other: &LevelFilter) -> bool {
        *self as usize == *other as usize
    }
}
impl PartialOrd for Level {
    #[inline]
    fn partial_cmp(&self, other: &Level) -> Option<cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl PartialOrd<LevelFilter> for Level {
    #[inline]
    fn partial_cmp(&self, other: &LevelFilter) -> Option<cmp::Ordering> {
        Some((*self as usize).cmp(&(*other as usize)))
    }
}
impl Ord for Level {
    #[inline]
    fn cmp(&self, other: &Level) -> cmp::Ordering {
        (*self as usize).cmp(&(*other as usize))
    }
}
// Local substitute for `Option::ok_or`: converts `Some(v)` into `Ok(v)`
// and `None` into `Err(e)`.
fn ok_or<T, E>(t: Option<T>, e: E) -> Result<T, E> {
    if let Some(value) = t {
        Ok(value)
    } else {
        Err(e)
    }
}
// Reimplemented here because std::ascii is not available in libcore
//
// ASCII-only case-insensitive comparison: strings are equal when they have
// the same length and every byte pair matches after folding a-z onto A-Z.
fn eq_ignore_ascii_case(a: &str, b: &str) -> bool {
    // Clearing bit 0x20 maps ASCII lowercase onto uppercase; other bytes
    // (including non-ASCII UTF-8 bytes) are left untouched.
    fn fold(c: u8) -> u8 {
        if c >= b'a' && c <= b'z' {
            c & !0x20
        } else {
            c
        }
    }
    if a.len() != b.len() {
        return false;
    }
    let mut lhs = a.bytes();
    let mut rhs = b.bytes();
    loop {
        match (lhs.next(), rhs.next()) {
            (Some(x), Some(y)) => {
                if fold(x) != fold(y) {
                    return false;
                }
            }
            // Lengths are equal, so both iterators end together.
            _ => return true,
        }
    }
}
impl FromStr for Level {
    type Err = ParseLevelError;
    fn from_str(level: &str) -> Result<Level, Self::Err> {
        // Index 0 of LOG_LEVEL_NAMES is "OFF", which is a valid LevelFilter
        // but not a valid Level — hence the `idx != 0` filter before the
        // (then infallible) from_usize conversion.
        ok_or(LOG_LEVEL_NAMES
                  .iter()
                  .position(|&name| eq_ignore_ascii_case(name, level))
                  .into_iter()
                  .filter(|&idx| idx != 0)
                  .map(|idx| Level::from_usize(idx).unwrap())
                  .next(),
              ParseLevelError(()))
    }
}
impl fmt::Display for Level {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        // `pad` (rather than `write!`) honors width/alignment/fill flags.
        fmt.pad(LOG_LEVEL_NAMES[*self as usize])
    }
}
impl Level {
    // Inverse of the discriminant cast; `None` for values outside 1-5.
    fn from_usize(u: usize) -> Option<Level> {
        match u {
            1 => Some(Level::Error),
            2 => Some(Level::Warn),
            3 => Some(Level::Info),
            4 => Some(Level::Debug),
            5 => Some(Level::Trace),
            _ => None,
        }
    }
    /// Returns the most verbose logging level.
    #[inline]
    pub fn max() -> Level {
        Level::Trace
    }
    /// Converts the `Level` to the equivalent `LevelFilter`.
    #[inline]
    pub fn to_level_filter(&self) -> LevelFilter {
        // The discriminants of the two enums line up (Error = 1 in both),
        // so this cast round-trips and the unwrap cannot fail.
        LevelFilter::from_usize(*self as usize).unwrap()
    }
}
/// An enum representing the available verbosity level filters of the logging
/// framework.
///
/// A `LevelFilter` may be compared directly to a [`Level`](enum.Level.html).
/// Use this type to [`get()`](struct.MaxLevelFilter.html#method.get) and
/// [`set()`](struct.MaxLevelFilter.html#method.set) the
/// [`MaxLevelFilter`](struct.MaxLevelFilter.html), or to match with the getter
/// [`max_level()`](fn.max_level.html).
#[repr(usize)]
#[derive(Copy, Eq, Debug, Hash)]
pub enum LevelFilter {
    /// A level lower than all log levels.
    Off,
    /// Corresponds to the `Error` log level.
    Error,
    /// Corresponds to the `Warn` log level.
    Warn,
    /// Corresponds to the `Info` log level.
    Info,
    /// Corresponds to the `Debug` log level.
    Debug,
    /// Corresponds to the `Trace` log level.
    Trace,
}
// Deriving generates terrible impls of these traits
// These compare via the usize discriminant, which also allows cross-type
// comparison with Level (whose discriminants line up).
impl Clone for LevelFilter {
    #[inline]
    fn clone(&self) -> LevelFilter {
        *self
    }
}
impl PartialEq for LevelFilter {
    #[inline]
    fn eq(&self, other: &LevelFilter) -> bool {
        *self as usize == *other as usize
    }
}
impl PartialEq<Level> for LevelFilter {
    // Delegates to the symmetric impl on Level.
    #[inline]
    fn eq(&self, other: &Level) -> bool {
        other.eq(self)
    }
}
impl PartialOrd for LevelFilter {
    #[inline]
    fn partial_cmp(&self, other: &LevelFilter) -> Option<cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl PartialOrd<Level> for LevelFilter {
    #[inline]
    fn partial_cmp(&self, other: &Level) -> Option<cmp::Ordering> {
        // Delegate to the symmetric impl and flip the ordering back.
        other.partial_cmp(self).map(|x| x.reverse())
    }
}
impl Ord for LevelFilter {
    #[inline]
    fn cmp(&self, other: &LevelFilter) -> cmp::Ordering {
        (*self as usize).cmp(&(*other as usize))
    }
}
impl FromStr for LevelFilter {
    type Err = ParseLevelError;
    fn from_str(level: &str) -> Result<LevelFilter, Self::Err> {
        // Unlike Level's FromStr, index 0 ("OFF") is accepted here because
        // LevelFilter has an Off variant.
        ok_or(LOG_LEVEL_NAMES
                  .iter()
                  .position(|&name| eq_ignore_ascii_case(name, level))
                  .map(|p| LevelFilter::from_usize(p).unwrap()),
              ParseLevelError(()))
    }
}
impl fmt::Display for LevelFilter {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        // Use `Formatter::pad` (as `Level`'s `Display` already does) so
        // width/alignment/fill flags like `{:5}` are honored instead of
        // being silently ignored by a plain `write!`. The bare `{}` output
        // is unchanged.
        fmt.pad(LOG_LEVEL_NAMES[*self as usize])
    }
}
impl LevelFilter {
    // Inverse of the discriminant cast; `None` for values outside 0-5.
    fn from_usize(u: usize) -> Option<LevelFilter> {
        match u {
            0 => Some(LevelFilter::Off),
            1 => Some(LevelFilter::Error),
            2 => Some(LevelFilter::Warn),
            3 => Some(LevelFilter::Info),
            4 => Some(LevelFilter::Debug),
            5 => Some(LevelFilter::Trace),
            _ => None,
        }
    }
    /// Returns the most verbose logging level filter.
    #[inline]
    pub fn max() -> LevelFilter {
        LevelFilter::Trace
    }
    /// Converts `self` to the equivalent `Level`.
    ///
    /// Returns `None` if `self` is `LevelFilter::Off`.
    #[inline]
    pub fn to_level(&self) -> Option<Level> {
        // Level::from_usize has no mapping for 0 (Off), giving the
        // documented None in exactly that case.
        Level::from_usize(*self as usize)
    }
}
/// The "payload" of a log message.
///
/// # Use
///
/// `Record` structures are passed as parameters to the [`log`][method.log]
/// method of the [`Log`] trait. Logger implementors manipulate these
/// structures in order to display log messages. `Record`s are automatically
/// created by the [`log!`] macro and so are not seen by log users.
///
/// Note that the [`level()`] and [`target()`] accessors are equivalent to
/// `self.metadata().level()` and `self.metadata().target()` respectively.
/// These methods are provided as a convenience for users of this structure.
///
/// ## Example
///
/// The following example shows a simple logger that displays the level,
/// module path, and message of any `Record` that is passed to it.
///
/// ```rust
/// # extern crate log;
/// struct SimpleLogger;
///
/// impl log::Log for SimpleLogger {
/// fn enabled(&self, metadata: &log::Metadata) -> bool {
/// true
/// }
///
/// fn log(&self, record: &log::Record) {
/// if !self.enabled(record.metadata()) {
/// return;
/// }
///
/// println!("{}:{} -- {}",
/// record.level(),
/// record.location().module_path(),
/// record.args());
/// }
/// }
/// ```
///
/// [method.log]: trait.Log.html#method.log
/// [`Log`]: trait.Log.html
/// [`log!`]: macro.log.html
/// [`level()`]: struct.Record.html#method.level
/// [`target()`]: struct.Record.html#method.target
#[derive(Debug)]
pub struct Record<'a> {
    // Level and target, grouped so `Log::enabled` can be queried without
    // constructing a full record.
    metadata: Metadata<'a>,
    // Call-site location captured by the log! macro.
    location: &'a Location,
    // Lazily-formatted message body.
    args: fmt::Arguments<'a>,
}
impl<'a> Record<'a> {
    /// The message body.
    pub fn args(&self) -> &fmt::Arguments<'a> {
        &self.args
    }
    /// Metadata about the log directive.
    pub fn metadata(&self) -> &Metadata {
        &self.metadata
    }
    /// The location of the log directive.
    pub fn location(&self) -> &Location {
        self.location
    }
    /// The verbosity level of the message.
    ///
    /// Convenience for `self.metadata().level()`.
    pub fn level(&self) -> Level {
        self.metadata.level()
    }
    /// The name of the target of the directive.
    ///
    /// Convenience for `self.metadata().target()`.
    pub fn target(&self) -> &str {
        self.metadata.target()
    }
}
/// Metadata about a log message.
///
/// # Use
///
/// `Metadata` structs are created when users of the library use
/// logging macros.
///
/// They are consumed by implementations of the `Log` trait in the
/// `enabled` method.
///
/// `Record`s use `Metadata` to determine the log message's severity
/// and target.
///
/// Users should use the `log_enabled!` macro in their code to avoid
/// constructing expensive log messages.
///
/// # Examples
///
/// ```rust
/// # #[macro_use]
/// # extern crate log;
/// #
/// use log::{Record, Level, Metadata};
///
/// struct MyLogger;
///
/// impl log::Log for MyLogger {
/// fn enabled(&self, metadata: &Metadata) -> bool {
/// metadata.level() <= Level::Info
/// }
///
/// fn log(&self, record: &Record) {
/// if self.enabled(record.metadata()) {
/// println!("{} - {}", record.level(), record.args());
/// }
/// }
/// }
///
/// # fn main(){}
/// ```
#[derive(Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
pub struct Metadata<'a> {
    // Severity of the message.
    level: Level,
    // Filtering target; per the crate docs this defaults to the module path
    // of the log call site, though callers may override it.
    target: &'a str,
}
impl<'a> Metadata<'a> {
    /// The verbosity level of the message.
    pub fn level(&self) -> Level {
        self.level
    }
    /// The name of the target of the directive.
    pub fn target(&self) -> &str {
        self.target
    }
}
/// A trait encapsulating the operations required of a logger
///
/// Implementations must be usable from multiple threads simultaneously
/// (note the `Sync + Send` supertraits).
pub trait Log: Sync + Send {
    /// Determines if a log message with the specified metadata would be
    /// logged.
    ///
    /// This is used by the `log_enabled!` macro to allow callers to avoid
    /// expensive computation of log message arguments if the message would be
    /// discarded anyway.
    fn enabled(&self, metadata: &Metadata) -> bool;
    /// Logs the `Record`.
    ///
    /// Note that `enabled` is *not* necessarily called before this method.
    /// Implementations of `log` should perform all necessary filtering
    /// internally.
    fn log(&self, record: &Record);
}
// Just used as a dummy initial value for LOGGER
struct NopLogger;
impl Log for NopLogger {
    // Reports everything as disabled and silently drops all records.
    fn enabled(&self, _: &Metadata) -> bool {
        false
    }
    fn log(&self, _: &Record) {}
}
/// The location of a log message.
///
/// # Use
///
/// `Location` structs are created by the [`log!`] macro. They are attached to
/// [`Record`] structs, which are used by loggers to display logging messages.
/// `Location`s can be accessed using the [`location()`] method on [`Record`]s.
/// Log users do not need to directly use this struct.
///
/// ## Example
/// The below example shows a simple logger that prints the module path,
/// file name, and line number of the location the [`log!`] macro was called.
///
/// ```rust
/// # extern crate log;
/// struct SimpleLogger;
///
/// impl log::Log for SimpleLogger {
/// fn enabled(&self, metadata: &log::Metadata) -> bool {
/// true
/// }
///
/// fn log(&self, record: &log::Record) {
/// if !self.enabled(record.metadata()) {
/// return;
/// }
///
/// let location = record.location();
/// println!("{}:{}:{} -- {}",
/// location.module_path(),
/// location.file(),
/// location.line(),
/// record.args());
/// }
/// }
/// ```
///
/// # Warning
///
/// The fields of this struct are public so that they may be initialized by the
/// [`log!`] macro. They are subject to change at any time and should never be
/// accessed directly.
///
/// [`log!`]: macro.log.html
/// [`Record`]: struct.Record.html
/// [`location()`]: struct.Record.html#method.location
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Location {
    // Fields are public (with a __ prefix) only so the log! macro can
    // construct this struct; see the "Warning" section in the docs above.
    #[doc(hidden)]
    pub __module_path: &'static str,
    #[doc(hidden)]
    pub __file: &'static str,
    #[doc(hidden)]
    pub __line: u32,
}
impl Location {
    /// The module path of the message.
    pub fn module_path(&self) -> &str {
        self.__module_path
    }
    /// The source file containing the message.
    pub fn file(&self) -> &str {
        self.__file
    }
    /// The line containing the message.
    pub fn line(&self) -> u32 {
        self.__line
    }
}
/// A token providing read and write access to the global maximum log level
/// filter.
///
/// The maximum log level is used as an optimization to avoid evaluating log
/// messages that will be ignored by the logger. Any message with a level
/// higher than the maximum log level filter will be ignored. A logger should
/// make sure to keep the maximum log level filter in sync with its current
/// configuration.
#[allow(missing_copy_implementations)]
pub struct MaxLevelFilter(());
// The private `()` field keeps construction inside this crate: the only
// instance is handed to the make_logger closure by set_logger_raw.
impl fmt::Debug for MaxLevelFilter {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "MaxLevelFilter")
    }
}
impl MaxLevelFilter {
    /// Gets the current maximum log level filter.
    pub fn get(&self) -> LevelFilter {
        max_level()
    }
    /// Sets the maximum log level.
    pub fn set(&self, level: LevelFilter) {
        // Stored as the usize discriminant; read back in max_level().
        MAX_LOG_LEVEL_FILTER.store(level as usize, Ordering::SeqCst)
    }
}
/// Returns the current maximum log level.
///
/// The `log!`, `error!`, `warn!`, `info!`, `debug!`, and `trace!` macros check
/// this value and discard any message logged at a higher level. The maximum
/// log level is set by the `MaxLevel` token passed to loggers.
#[inline(always)]
pub fn max_level() -> LevelFilter {
    // SAFETY: the atomic only ever holds values that are valid LevelFilter
    // discriminants — `level as usize` stores in MaxLevelFilter::set and
    // the literal 0 (Off) in shutdown_logger_raw — so transmuting back to
    // the #[repr(usize)] enum is sound.
    unsafe { mem::transmute(MAX_LOG_LEVEL_FILTER.load(Ordering::Relaxed)) }
}
/// Sets the global logger.
///
/// The `make_logger` closure is passed a `MaxLevel` object, which the
/// logger should use to keep the global maximum log level in sync with the
/// highest log level that the logger will not ignore.
///
/// This function may only be called once in the lifetime of a program. Any log
/// events that occur before the call to `set_logger` completes will be
/// ignored.
///
/// This function does not typically need to be called manually. Logger
/// implementations should provide an initialization method that calls
/// `set_logger` internally.
///
/// Requires the `use_std` feature (enabled by default).
#[cfg(feature = "use_std")]
pub fn set_logger<M>(make_logger: M) -> Result<(), SetLoggerError>
    where M: FnOnce(MaxLevelFilter) -> Box<Log>
{
    // The Box<Log> is converted into a raw pointer via transmute; the
    // matching transmute back to a Box happens in shutdown_logger().
    unsafe { set_logger_raw(|max_level| mem::transmute(make_logger(max_level))) }
}
/// Sets the global logger from a raw pointer.
///
/// This function is similar to `set_logger` except that it is usable in
/// `no_std` code.
///
/// The `make_logger` closure is passed a `MaxLevel` object, which the
/// logger should use to keep the global maximum log level in sync with the
/// highest log level that the logger will not ignore.
///
/// This function may only be called once in the lifetime of a program. Any log
/// events that occur before the call to `set_logger_raw` completes will be
/// ignored.
///
/// This function does not typically need to be called manually. Logger
/// implementations should provide an initialization method that calls
/// `set_logger_raw` internally.
///
/// # Safety
///
/// The pointer returned by `make_logger` must remain valid for the entire
/// duration of the program or until `shutdown_logger_raw` is called. In
/// addition, `shutdown_logger` *must not* be called after this function.
pub unsafe fn set_logger_raw<M>(make_logger: M) -> Result<(), SetLoggerError>
    where M: FnOnce(MaxLevelFilter) -> *const Log
{
    // Win the UNINITIALIZED -> INITIALIZING race or bail: only one caller
    // may ever install a logger.
    if STATE.compare_and_swap(UNINITIALIZED, INITIALIZING, Ordering::SeqCst) != UNINITIALIZED {
        return Err(SetLoggerError(()));
    }
    // Writing the `static mut` is safe here because logger() only reads
    // LOGGER once STATE == INITIALIZED, which is published after the write.
    LOGGER = make_logger(MaxLevelFilter(()));
    STATE.store(INITIALIZED, Ordering::SeqCst);
    Ok(())
}
/// Shuts down the global logger.
///
/// This function may only be called once in the lifetime of a program, and may
/// not be called before `set_logger`. Once the global logger has been shut
/// down, it can no longer be re-initialized by `set_logger`. Any log events
/// that occur after the call to `shutdown_logger` completes will be ignored.
///
/// The logger that was originally created by the call to to `set_logger` is
/// returned on success. At that point it is guaranteed that no other threads
/// are concurrently accessing the logger object.
#[cfg(feature = "use_std")]
pub fn shutdown_logger() -> Result<Box<Log>, ShutdownLoggerError> {
    // Reverses the Box -> raw pointer transmute performed in set_logger(),
    // returning ownership of the logger to the caller.
    shutdown_logger_raw().map(|l| unsafe { mem::transmute(l) })
}
/// Shuts down the global logger.
///
/// This function is similar to `shutdown_logger` except that it is usable in
/// `no_std` code.
///
/// This function may only be called once in the lifetime of a program, and may
/// not be called before `set_logger_raw`. Once the global logger has been shut
/// down, it can no longer be re-initialized by `set_logger_raw`. Any log
/// events that occur after the call to `shutdown_logger_raw` completes will be
/// ignored.
///
/// The pointer that was originally passed to `set_logger_raw` is returned on
/// success. At that point it is guaranteed that no other threads are
/// concurrently accessing the logger object.
pub fn shutdown_logger_raw() -> Result<*const Log, ShutdownLoggerError> {
    // Set the global log level to stop other thread from logging
    // (0 is the LevelFilter::Off discriminant).
    MAX_LOG_LEVEL_FILTER.store(0, Ordering::SeqCst);
    // Set to INITIALIZING to prevent re-initialization after
    if STATE.compare_and_swap(INITIALIZED, INITIALIZING, Ordering::SeqCst) != INITIALIZED {
        return Err(ShutdownLoggerError(()));
    }
    // Spin until every in-flight logging call has dropped its LoggerGuard
    // (see the module comment above the statics for why this terminates).
    while REFCOUNT.load(Ordering::SeqCst) != 0 {
        // FIXME add a sleep here when it doesn't involve timers
    }
    unsafe {
        let logger = LOGGER;
        // Reset to the no-op logger before handing the pointer back so the
        // global never dangles once the caller frees the logger.
        LOGGER = &NopLogger;
        Ok(logger)
    }
}
/// The type returned by [`set_logger`] if [`set_logger`] has already been called.
/// [`set_logger`]: fn.set_logger.html
#[allow(missing_copy_implementations)]
#[derive(Debug)]
pub struct SetLoggerError(());
// The private `()` field keeps construction inside this crate.
impl fmt::Display for SetLoggerError {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.write_str(SET_LOGGER_ERROR)
    }
}
// The Error trait is not available in libcore
#[cfg(feature = "use_std")]
impl error::Error for SetLoggerError {
    fn description(&self) -> &str {
        SET_LOGGER_ERROR
    }
}
/// The type returned by [`shutdown_logger_raw`] if [`shutdown_logger_raw`] has
/// already been called or if [`set_logger_raw`] has not been called yet.
/// [`set_logger_raw`]: fn.set_logger_raw.html
/// [`shutdown_logger_raw`]: fn.shutdown_logger_raw.html
#[allow(missing_copy_implementations)]
#[derive(Debug)]
pub struct ShutdownLoggerError(());
// The private `()` field keeps construction inside this crate.
impl fmt::Display for ShutdownLoggerError {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.write_str(SHUTDOWN_LOGGER_ERROR)
    }
}
// The Error trait is not available in libcore
#[cfg(feature = "use_std")]
impl error::Error for ShutdownLoggerError {
    fn description(&self) -> &str {
        SHUTDOWN_LOGGER_ERROR
    }
}
/// The type returned by `from_str` when the string doesn't match any of the log levels.
#[allow(missing_copy_implementations)]
#[derive(Debug, PartialEq)]
pub struct ParseLevelError(());
// The private `()` field keeps construction inside this crate.
impl fmt::Display for ParseLevelError {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.write_str(LEVEL_PARSE_ERROR)
    }
}
// The Error trait is not available in libcore
#[cfg(feature = "use_std")]
impl error::Error for ParseLevelError {
    fn description(&self) -> &str {
        LEVEL_PARSE_ERROR
    }
}
/// Deprecated
///
/// Use https://crates.io/crates/log-panics instead.
#[cfg(all(feature = "nightly", feature = "use_std"))]
pub fn log_panics() {
    // Installs `panic::log` (below) as the global panic hook so panic
    // messages are reported through the `error!` macro.
    std::panic::set_hook(Box::new(panic::log));
}
// inner module so that the reporting module is log::panic instead of log
#[cfg(all(feature = "nightly", feature = "use_std"))]
mod panic {
    use std::panic::PanicInfo;
    use std::thread;
    // Panic hook installed by log_panics(): logs the panicking thread's
    // name, the panic message, and (when available) the panic location.
    pub fn log(info: &PanicInfo) {
        let thread = thread::current();
        let thread = thread.name().unwrap_or("<unnamed>");
        // Panic payloads from panic!("...") are &'static str, and from
        // panic!("{}", ..) are String; any other payload type is opaque.
        let msg = match info.payload().downcast_ref::<&'static str>() {
            Some(s) => *s,
            None => {
                match info.payload().downcast_ref::<String>() {
                    Some(s) => &s[..],
                    None => "Box<Any>",
                }
            }
        };
        match info.location() {
            Some(location) => {
                error!("thread '{}' panicked at '{}': {}:{}",
                       thread,
                       msg,
                       location.file(),
                       location.line())
            }
            None => error!("thread '{}' panicked at '{}'", thread, msg),
        }
    }
}
// Guard that keeps the global logger usable while a caller holds it:
// `logger()` bumps REFCOUNT before handing one out, and dropping the guard
// releases that count, which `shutdown_logger_raw` waits on.
struct LoggerGuard(&'static Log);
impl Drop for LoggerGuard {
    fn drop(&mut self) {
        // Release the reference taken in `logger()`.
        REFCOUNT.fetch_sub(1, Ordering::SeqCst);
    }
}
impl Deref for LoggerGuard {
    type Target = Log;
    fn deref(&self) -> &(Log + 'static) {
        self.0
    }
}
// Returns a guard around the global logger, or `None` when no logger is
// installed (or shutdown is in progress).
fn logger() -> Option<LoggerGuard> {
    // Increment BEFORE checking STATE: if STATE reads INITIALIZED after the
    // increment, shutdown cannot free the logger until this guard drops.
    REFCOUNT.fetch_add(1, Ordering::SeqCst);
    if STATE.load(Ordering::SeqCst) != INITIALIZED {
        // Not (or no longer) initialized: undo the speculative increment.
        REFCOUNT.fetch_sub(1, Ordering::SeqCst);
        None
    } else {
        Some(LoggerGuard(unsafe { &*LOGGER }))
    }
}
// WARNING
// This is not considered part of the crate's public API. It is subject to
// change at any time.
#[doc(hidden)]
pub fn __enabled(level: Level, target: &str) -> bool {
    // With no logger installed (or during shutdown), nothing is enabled.
    match logger() {
        Some(logger) => {
            logger.enabled(&Metadata {
                level: level,
                target: target,
            })
        }
        None => false,
    }
}
// WARNING
// This is not considered part of the crate's public API. It is subject to
// change at any time.
#[doc(hidden)]
pub fn __log(level: Level, target: &str, loc: &Location, args: fmt::Arguments) {
    // Silently drop the record when no logger is installed.
    let logger = match logger() {
        Some(logger) => logger,
        None => return,
    };
    let record = Record {
        metadata: Metadata {
            level: level,
            target: target,
        },
        location: loc,
        args: args,
    };
    logger.log(&record)
}
// WARNING
// This is not considered part of the crate's public API. It is subject to
// change at any time.
#[inline(always)]
#[doc(hidden)]
pub fn __static_max_level() -> LevelFilter {
    // In release builds the `release_max_level_*` features take precedence
    // over the unconditional `max_level_*` features; in debug builds only
    // the latter apply. Everything here folds to a constant at compile time.
    let release = !cfg!(debug_assertions);
    if release && cfg!(feature = "release_max_level_off") {
        LevelFilter::Off
    } else if release && cfg!(feature = "release_max_level_error") {
        LevelFilter::Error
    } else if release && cfg!(feature = "release_max_level_warn") {
        LevelFilter::Warn
    } else if release && cfg!(feature = "release_max_level_info") {
        LevelFilter::Info
    } else if release && cfg!(feature = "release_max_level_debug") {
        LevelFilter::Debug
    } else if release && cfg!(feature = "release_max_level_trace") {
        LevelFilter::Trace
    } else if cfg!(feature = "max_level_off") {
        LevelFilter::Off
    } else if cfg!(feature = "max_level_error") {
        LevelFilter::Error
    } else if cfg!(feature = "max_level_warn") {
        LevelFilter::Warn
    } else if cfg!(feature = "max_level_info") {
        LevelFilter::Info
    } else if cfg!(feature = "max_level_debug") {
        LevelFilter::Debug
    } else {
        LevelFilter::Trace
    }
}
#[cfg(test)]
mod tests {
    extern crate std;
    use tests::std::string::ToString;
    use super::{Level, LevelFilter, ParseLevelError};

    #[test]
    fn test_levelfilter_from_str() {
        // Parsing is case-insensitive and accepts every filter, "off" included.
        assert_eq!("off".parse(), Ok(LevelFilter::Off));
        assert_eq!("error".parse(), Ok(LevelFilter::Error));
        assert_eq!("warn".parse(), Ok(LevelFilter::Warn));
        assert_eq!("info".parse(), Ok(LevelFilter::Info));
        assert_eq!("debug".parse(), Ok(LevelFilter::Debug));
        assert_eq!("trace".parse(), Ok(LevelFilter::Trace));
        assert_eq!("OFF".parse(), Ok(LevelFilter::Off));
        assert_eq!("ERROR".parse(), Ok(LevelFilter::Error));
        assert_eq!("WARN".parse(), Ok(LevelFilter::Warn));
        assert_eq!("INFO".parse(), Ok(LevelFilter::Info));
        assert_eq!("DEBUG".parse(), Ok(LevelFilter::Debug));
        assert_eq!("TRACE".parse(), Ok(LevelFilter::Trace));
        assert_eq!("asdf".parse::<LevelFilter>(), Err(ParseLevelError(())));
    }

    #[test]
    fn test_level_from_str() {
        // "OFF" names a filter, not a level, so it must fail to parse.
        assert_eq!("OFF".parse::<Level>(), Err(ParseLevelError(())));
        assert_eq!("error".parse(), Ok(Level::Error));
        assert_eq!("warn".parse(), Ok(Level::Warn));
        assert_eq!("info".parse(), Ok(Level::Info));
        assert_eq!("debug".parse(), Ok(Level::Debug));
        assert_eq!("trace".parse(), Ok(Level::Trace));
        assert_eq!("ERROR".parse(), Ok(Level::Error));
        assert_eq!("WARN".parse(), Ok(Level::Warn));
        assert_eq!("INFO".parse(), Ok(Level::Info));
        assert_eq!("DEBUG".parse(), Ok(Level::Debug));
        assert_eq!("TRACE".parse(), Ok(Level::Trace));
        assert_eq!("asdf".parse::<Level>(), Err(ParseLevelError(())));
    }

    #[test]
    fn test_level_show() {
        assert_eq!("INFO", Level::Info.to_string());
        assert_eq!("ERROR", Level::Error.to_string());
    }

    #[test]
    fn test_levelfilter_show() {
        assert_eq!("OFF", LevelFilter::Off.to_string());
        assert_eq!("ERROR", LevelFilter::Error.to_string());
    }

    #[test]
    fn test_cross_cmp() {
        // Levels and filters share one ordering: Off < Error < ... < Trace.
        assert!(Level::Debug > LevelFilter::Error);
        assert!(LevelFilter::Warn < Level::Trace);
        assert!(LevelFilter::Off < Level::Error);
    }

    #[test]
    fn test_cross_eq() {
        assert!(Level::Error == LevelFilter::Error);
        assert!(LevelFilter::Off != Level::Error);
        assert!(Level::Trace == LevelFilter::Trace);
    }

    #[test]
    fn test_to_level() {
        assert_eq!(Some(Level::Error), LevelFilter::Error.to_level());
        // `Off` has no `Level` counterpart.
        assert_eq!(None, LevelFilter::Off.to_level());
        assert_eq!(Some(Level::Debug), LevelFilter::Debug.to_level());
    }

    #[test]
    fn test_to_level_filter() {
        assert_eq!(LevelFilter::Error, Level::Error.to_level_filter());
        assert_eq!(LevelFilter::Trace, Level::Trace.to_level_filter());
    }

    #[test]
    #[cfg(feature = "use_std")]
    fn test_error_trait() {
        use std::error::Error;
        use super::SetLoggerError;
        let e = SetLoggerError(());
        assert_eq!(e.description(),
                   "attempted to set a logger after the logging system \
                    was already initialized");
    }
}
// max_level doc improvements
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A lightweight logging facade.
//!
//! The `log` crate provides a single logging API that abstracts over the
//! actual logging implementation. Libraries can use the logging API provided
//! by this crate, and the consumer of those libraries can choose the logging
//! framework that is most suitable for its use case.
//!
//! If no logging implementation is selected, the facade falls back to a "noop"
//! implementation that ignores all log messages. The overhead in this case
//! is very small - just an integer load, comparison and jump.
//!
//! A log request consists of a _target_, a _level_, and a _body_. A target is a
//! string which defaults to the module path of the location of the log request,
//! though that default may be overridden. Logger implementations typically use
//! the target to filter requests based on some user configuration.
//!
//! # Use
//!
//! ## In libraries
//!
//! Libraries should link only to the `log` crate, and use the provided
//! macros to log whatever information will be useful to downstream consumers.
//!
//! ### Examples
//!
//! ```rust
//! # #![allow(unstable)]
//! #[macro_use]
//! extern crate log;
//!
//! # #[derive(Debug)] pub struct Yak(String);
//! # impl Yak { fn shave(&self, _: u32) {} }
//! # fn find_a_razor() -> Result<u32, u32> { Ok(1) }
//! pub fn shave_the_yak(yak: &Yak) {
//! info!(target: "yak_events", "Commencing yak shaving for {:?}", yak);
//!
//! loop {
//! match find_a_razor() {
//! Ok(razor) => {
//! info!("Razor located: {}", razor);
//! yak.shave(razor);
//! break;
//! }
//! Err(err) => {
//! warn!("Unable to locate a razor: {}, retrying", err);
//! }
//! }
//! }
//! }
//! # fn main() {}
//! ```
//!
//! ## In executables
//!
//! Executables should choose a logging framework and initialize it early in the
//! runtime of the program. Logging frameworks will typically include a
//! function to do this. Any log messages generated before the framework is
//! initialized will be ignored.
//!
//! The executable itself may use the `log` crate to log as well.
//!
//! ### Warning
//!
//! The logging system may only be initialized once.
//!
//! ### Examples
//!
//! ```rust,ignore
//! #[macro_use]
//! extern crate log;
//! extern crate env_logger;
//!
//! fn main() {
//! // Select env_logger, one possible logger implementation
//! // (see https://doc.rust-lang.org/log/env_logger/index.html)
//! env_logger::init().unwrap();
//!
//! info!("starting up");
//! error!("error: {}", 404);
//!
//! // ...
//! }
//! ```
//!
//! # Logger implementations
//!
//! Loggers implement the [`Log`] trait. Here's a very basic example that simply
//! logs all messages at the [`Error`][level_link], [`Warn`][level_link] or
//! [`Info`][level_link] levels to stdout:
//!
//! ```rust
//! extern crate log;
//!
//! use log::{Record, Level, Metadata};
//!
//! struct SimpleLogger;
//!
//! impl log::Log for SimpleLogger {
//! fn enabled(&self, metadata: &Metadata) -> bool {
//! metadata.level() <= Level::Info
//! }
//!
//! fn log(&self, record: &Record) {
//! if self.enabled(record.metadata()) {
//! println!("{} - {}", record.level(), record.args());
//! }
//! }
//! }
//!
//! # fn main() {}
//! ```
//!
//! Loggers are installed by calling the [`set_logger`] function. It takes a
//! closure which is provided a [`MaxLevelFilter`] token and returns a
//! [`Log`] trait object. The [`MaxLevelFilter`] token controls the global
//! maximum log level. The logging facade uses this as an optimization to
//! improve performance of log messages at levels that are disabled. In the
//! case of our example logger, we'll want to set the maximum log level to
//! [`Info`][level_link], since we ignore any [`Debug`][level_link] or
//! [`Trace`][level_link] level log messages. A logging framework should
//! provide a function that wraps a call to [`set_logger`], handling
//! initialization of the logger:
//!
//! ```rust
//! # extern crate log;
//! # use log::{Level, LevelFilter, SetLoggerError, Metadata};
//! # struct SimpleLogger;
//! # impl log::Log for SimpleLogger {
//! # fn enabled(&self, _: &Metadata) -> bool { false }
//! # fn log(&self, _: &log::Record) {}
//! # }
//! # fn main() {}
//! # #[cfg(feature = "use_std")]
//! pub fn init() -> Result<(), SetLoggerError> {
//! log::set_logger(|max_level| {
//! max_level.set(LevelFilter::Info);
//! Box::new(SimpleLogger)
//! })
//! }
//! ```
//!
//! # Use with `no_std`
//!
//! To use the `log` crate without depending on `libstd`, you need to specify
//! `default-features = false` when specifying the dependency in `Cargo.toml`.
//! This makes no difference to libraries using `log` since the logging API
//! remains the same. However executables will need to use the [`set_logger_raw`]
//! function to initialize a logger and the [`shutdown_logger_raw`] function to
//! shut down the global logger before exiting:
//!
//! ```rust
//! # extern crate log;
//! # use log::{Level, LevelFilter, SetLoggerError, ShutdownLoggerError,
//! # Metadata};
//! # struct SimpleLogger;
//! # impl log::Log for SimpleLogger {
//! # fn enabled(&self, _: &Metadata) -> bool { false }
//! # fn log(&self, _: &log::Record) {}
//! # }
//! # impl SimpleLogger {
//! # fn flush(&self) {}
//! # }
//! # fn main() {}
//! pub fn init() -> Result<(), SetLoggerError> {
//! unsafe {
//! log::set_logger_raw(|max_level| {
//! static LOGGER: SimpleLogger = SimpleLogger;
//! max_level.set(LevelFilter::Info);
//!             &LOGGER
//! })
//! }
//! }
//! pub fn shutdown() -> Result<(), ShutdownLoggerError> {
//! log::shutdown_logger_raw().map(|logger| {
//! let logger = unsafe { &*(logger as *const SimpleLogger) };
//! logger.flush();
//! })
//! }
//! ```
//!
//! # Features
//!
//! Optionally, when defining a `Cargo.toml` file, additional parameters can be passed that affect
//! the logger depending on the target of the build. Effectively, `max_level_*` and
//! `release_max_level_*` directives can be added as features of the log dependency. When
//! these are set, they override the behavior of the logging levels above the declared maximum
//! preventing anything higher from logging.
//!
//! ```toml
//! [dependencies.log]
//! version = "^0.3.7"
//! features = ["max_level_debug", "release_max_level_warn"]
//! ```
//!
//! [`Log`]: trait.Log.html
//! [level_link]: enum.Level.html
//! [`set_logger`]: fn.set_logger.html
//! [`MaxLevelFilter`]: struct.MaxLevelFilter.html
//! [`set_logger_raw`]: fn.set_logger_raw.html
//! [`shutdown_logger_raw`]: fn.shutdown_logger_raw.html
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://www.rust-lang.org/favicon.ico",
html_root_url = "https://docs.rs/log/0.3.8")]
#![warn(missing_docs)]
#![deny(missing_debug_implementations)]
#![cfg_attr(feature = "nightly", feature(panic_handler))]
#![cfg_attr(not(feature = "use_std"), no_std)]
// When compiled for the rustc compiler itself we want to make sure that this is
// an unstable crate
#![cfg_attr(rustbuild, feature(staged_api, rustc_private))]
#![cfg_attr(rustbuild, unstable(feature = "rustc_private", issue = "27812"))]
#[cfg(not(feature = "use_std"))]
extern crate core as std;
use std::cmp;
#[cfg(feature = "use_std")]
use std::error;
use std::fmt;
use std::mem;
use std::ops::Deref;
use std::str::FromStr;
use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering};
#[macro_use]
mod macros;
mod serde;
// The setup here is a bit weird to make shutdown_logger_raw work.
//
// There are four different states that we care about: the logger's
// uninitialized, the logger's initializing (set_logger's been called but
// LOGGER hasn't actually been set yet), the logger's active, or the logger is
// shut down after calling shutdown_logger_raw.
//
// The LOGGER static holds a pointer to the global logger. It is protected by
// the STATE static which determines whether LOGGER has been initialized yet.
//
// The shutdown_logger_raw routine needs to make sure that no threads are
// actively logging before it returns. The number of actively logging threads is
// tracked in the REFCOUNT static. The routine first sets STATE back to
// INITIALIZING. All logging calls past that point will immediately return
// without accessing the logger. At that point, the at_exit routine just waits
// for the refcount to reach 0 before deallocating the logger. Note that the
// refcount does not necessarily monotonically decrease at this point, as new
// log calls still increment and decrement it, but the interval in between is
// small enough that the wait is really just for the active log calls to finish.
// Raw pointer to the active logger; only mutated while STATE guards access.
static mut LOGGER: *const Log = &NopLogger;
// Lifecycle of LOGGER: UNINITIALIZED -> INITIALIZING -> INITIALIZED, and
// back to INITIALIZING during shutdown (see the comment above).
static STATE: AtomicUsize = ATOMIC_USIZE_INIT;
// Number of threads currently holding a LoggerGuard.
static REFCOUNT: AtomicUsize = ATOMIC_USIZE_INIT;
const UNINITIALIZED: usize = 0;
const INITIALIZING: usize = 1;
const INITIALIZED: usize = 2;
// Global maximum level, stored as a LevelFilter discriminant.
static MAX_LOG_LEVEL_FILTER: AtomicUsize = ATOMIC_USIZE_INIT;
// Indexed by the shared Level/LevelFilter discriminants (0 = OFF).
static LOG_LEVEL_NAMES: [&'static str; 6] = ["OFF", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"];
static SET_LOGGER_ERROR: &'static str = "attempted to set a logger after the logging system \
                                         was already initialized";
static SHUTDOWN_LOGGER_ERROR: &'static str = "attempted to shut down the logger without an active logger";
static LEVEL_PARSE_ERROR: &'static str = "attempted to convert a string that doesn't match an existing log level";
/// An enum representing the available verbosity levels of the logging framework.
///
/// Typical usage includes: checking if a certain `Level` is enabled with
/// [`log_enabled!`](macro.log_enabled.html), specifying the `Level` of
/// [`log!`](macro.log.html), and comparing a `Level` directly to a
/// [`LevelFilter`](enum.LevelFilter.html).
// repr(usize) so discriminants can be stored/compared as plain integers.
#[repr(usize)]
#[derive(Copy, Eq, Debug, Hash)]
pub enum Level {
    /// The "error" level.
    ///
    /// Designates very serious errors.
    Error = 1, // This way these line up with the discriminants for LevelFilter below
    /// The "warn" level.
    ///
    /// Designates hazardous situations.
    Warn,
    /// The "info" level.
    ///
    /// Designates useful information.
    Info,
    /// The "debug" level.
    ///
    /// Designates lower priority information.
    Debug,
    /// The "trace" level.
    ///
    /// Designates very low priority, often extremely verbose, information.
    Trace,
}
// Manual impls (rather than derives) keep comparisons as plain integer
// casts; Level and LevelFilter share discriminant values, which makes the
// cross-type PartialEq/PartialOrd impls below possible.
impl Clone for Level {
    #[inline]
    fn clone(&self) -> Level {
        *self
    }
}
impl PartialEq for Level {
    #[inline]
    fn eq(&self, other: &Level) -> bool {
        *self as usize == *other as usize
    }
}
impl PartialEq<LevelFilter> for Level {
    #[inline]
    fn eq(&self, other: &LevelFilter) -> bool {
        // Valid because the discriminants of the two enums line up.
        *self as usize == *other as usize
    }
}
impl PartialOrd for Level {
    #[inline]
    fn partial_cmp(&self, other: &Level) -> Option<cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl PartialOrd<LevelFilter> for Level {
    #[inline]
    fn partial_cmp(&self, other: &LevelFilter) -> Option<cmp::Ordering> {
        Some((*self as usize).cmp(&(*other as usize)))
    }
}
impl Ord for Level {
    #[inline]
    fn cmp(&self, other: &Level) -> cmp::Ordering {
        (*self as usize).cmp(&(*other as usize))
    }
}
// Local equivalent of `Option::ok_or` (presumably kept instead of the std
// method for compatibility with older compilers -- confirm before removing).
fn ok_or<T, E>(t: Option<T>, e: E) -> Result<T, E> {
    if let Some(value) = t {
        Ok(value)
    } else {
        Err(e)
    }
}
// Reimplemented here because std::ascii is not available in libcore
fn eq_ignore_ascii_case(a: &str, b: &str) -> bool {
    // Fold an ASCII lower-case letter to upper case; all other bytes
    // (including non-ASCII UTF-8 bytes) pass through unchanged, so
    // multi-byte sequences only compare equal to themselves.
    fn fold(c: u8) -> u8 {
        if c >= b'a' && c <= b'z' {
            c - (b'a' - b'A')
        } else {
            c
        }
    }
    a.len() == b.len() &&
        a.bytes().zip(b.bytes()).all(|(x, y)| fold(x) == fold(y))
}
impl FromStr for Level {
    type Err = ParseLevelError;
    /// Parses a level name case-insensitively. `"OFF"` is rejected because
    /// it names a filter, not a level.
    fn from_str(level: &str) -> Result<Level, Self::Err> {
        let position = LOG_LEVEL_NAMES
            .iter()
            .position(|&name| eq_ignore_ascii_case(name, level));
        // Index 0 is "OFF", for which `Level::from_usize` returns `None`.
        ok_or(position.and_then(Level::from_usize), ParseLevelError(()))
    }
}
impl fmt::Display for Level {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        // `pad` (rather than `write!`) honors the formatter's width,
        // alignment, and fill flags (e.g. `{:>5}`).
        fmt.pad(LOG_LEVEL_NAMES[*self as usize])
    }
}
impl Level {
fn from_usize(u: usize) -> Option<Level> {
match u {
1 => Some(Level::Error),
2 => Some(Level::Warn),
3 => Some(Level::Info),
4 => Some(Level::Debug),
5 => Some(Level::Trace),
_ => None,
}
}
/// Returns the most verbose logging level.
#[inline]
pub fn max() -> Level {
Level::Trace
}
/// Converts the `Level` to the equivalent `LevelFilter`.
#[inline]
pub fn to_level_filter(&self) -> LevelFilter {
LevelFilter::from_usize(*self as usize).unwrap()
}
}
/// An enum representing the available verbosity level filters of the logging
/// framework.
///
/// A `LevelFilter` may be compared directly to a [`Level`](enum.Level.html).
/// Use this type to [`get()`](struct.MaxLevelFilter.html#method.get) and
/// [`set()`](struct.MaxLevelFilter.html#method.set) the
/// [`MaxLevelFilter`](struct.MaxLevelFilter.html), or to match with the getter
/// [`max_level()`](fn.max_level.html).
// repr(usize) so discriminants line up with Level (Off = 0, Error = 1, ...).
#[repr(usize)]
#[derive(Copy, Eq, Debug, Hash)]
pub enum LevelFilter {
    /// A level lower than all log levels.
    Off,
    /// Corresponds to the `Error` log level.
    Error,
    /// Corresponds to the `Warn` log level.
    Warn,
    /// Corresponds to the `Info` log level.
    Info,
    /// Corresponds to the `Debug` log level.
    Debug,
    /// Corresponds to the `Trace` log level.
    Trace,
}
// Deriving generates terrible impls of these traits
impl Clone for LevelFilter {
    #[inline]
    fn clone(&self) -> LevelFilter {
        *self
    }
}
impl PartialEq for LevelFilter {
    #[inline]
    fn eq(&self, other: &LevelFilter) -> bool {
        *self as usize == *other as usize
    }
}
impl PartialEq<Level> for LevelFilter {
    #[inline]
    fn eq(&self, other: &Level) -> bool {
        // Delegate to `Level == LevelFilter`; equality is symmetric.
        other.eq(self)
    }
}
impl PartialOrd for LevelFilter {
    #[inline]
    fn partial_cmp(&self, other: &LevelFilter) -> Option<cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl PartialOrd<Level> for LevelFilter {
    #[inline]
    fn partial_cmp(&self, other: &Level) -> Option<cmp::Ordering> {
        // Delegate to `Level cmp LevelFilter`, then flip the ordering
        // because the operands are swapped.
        other.partial_cmp(self).map(|x| x.reverse())
    }
}
impl Ord for LevelFilter {
    #[inline]
    fn cmp(&self, other: &LevelFilter) -> cmp::Ordering {
        (*self as usize).cmp(&(*other as usize))
    }
}
impl FromStr for LevelFilter {
    type Err = ParseLevelError;
    /// Parses a filter name (`"OFF"` through `"TRACE"`) case-insensitively.
    fn from_str(level: &str) -> Result<LevelFilter, Self::Err> {
        let found = LOG_LEVEL_NAMES
            .iter()
            .position(|&name| eq_ignore_ascii_case(name, level));
        match found {
            // Every name index maps to a filter, so `from_usize` cannot fail.
            Some(idx) => Ok(LevelFilter::from_usize(idx).unwrap()),
            None => Err(ParseLevelError(())),
        }
    }
}
impl fmt::Display for LevelFilter {
    /// Writes the filter's upper-case name (e.g. `"WARN"`).
    ///
    /// Uses `Formatter::pad` so width/alignment flags such as `{:>5}` are
    /// honored, matching the `Display` impl for `Level`; the previous
    /// `write!` call silently ignored those flags.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.pad(LOG_LEVEL_NAMES[*self as usize])
    }
}
impl LevelFilter {
fn from_usize(u: usize) -> Option<LevelFilter> {
match u {
0 => Some(LevelFilter::Off),
1 => Some(LevelFilter::Error),
2 => Some(LevelFilter::Warn),
3 => Some(LevelFilter::Info),
4 => Some(LevelFilter::Debug),
5 => Some(LevelFilter::Trace),
_ => None,
}
}
/// Returns the most verbose logging level filter.
#[inline]
pub fn max() -> LevelFilter {
LevelFilter::Trace
}
/// Converts `self` to the equivalent `Level`.
///
/// Returns `None` if `self` is `LevelFilter::Off`.
#[inline]
pub fn to_level(&self) -> Option<Level> {
Level::from_usize(*self as usize)
}
}
/// The "payload" of a log message.
///
/// # Use
///
/// `Record` structures are passed as parameters to the [`log`][method.log]
/// method of the [`Log`] trait. Logger implementors manipulate these
/// structures in order to display log messages. `Record`s are automatically
/// created by the [`log!`] macro and so are not seen by log users.
///
/// Note that the [`level()`] and [`target()`] accessors are equivalent to
/// `self.metadata().level()` and `self.metadata().target()` respectively.
/// These methods are provided as a convenience for users of this structure.
///
/// ## Example
///
/// The following example shows a simple logger that displays the level,
/// module path, and message of any `Record` that is passed to it.
///
/// ```rust
/// # extern crate log;
/// struct SimpleLogger;
///
/// impl log::Log for SimpleLogger {
/// fn enabled(&self, metadata: &log::Metadata) -> bool {
/// true
/// }
///
/// fn log(&self, record: &log::Record) {
/// if !self.enabled(record.metadata()) {
/// return;
/// }
///
/// println!("{}:{} -- {}",
/// record.level(),
/// record.location().module_path(),
/// record.args());
/// }
/// }
/// ```
///
/// [method.log]: trait.Log.html#method.log
/// [`Log`]: trait.Log.html
/// [`log!`]: macro.log.html
/// [`level()`]: struct.Record.html#method.level
/// [`target()`]: struct.Record.html#method.target
#[derive(Debug)]
pub struct Record<'a> {
    // What the message is about: level + target.
    metadata: Metadata<'a>,
    // Where in the source the log call appeared.
    location: &'a Location,
    // The preformatted message body.
    args: fmt::Arguments<'a>,
}
impl<'a> Record<'a> {
    /// The message body.
    pub fn args(&self) -> &fmt::Arguments<'a> {
        &self.args
    }
    /// Metadata about the log directive.
    pub fn metadata(&self) -> &Metadata {
        &self.metadata
    }
    /// The location of the log directive.
    pub fn location(&self) -> &Location {
        self.location
    }
    /// The verbosity level of the message.
    ///
    /// Convenience for `self.metadata().level()`.
    pub fn level(&self) -> Level {
        self.metadata.level()
    }
    /// The name of the target of the directive.
    ///
    /// Convenience for `self.metadata().target()`.
    pub fn target(&self) -> &str {
        self.metadata.target()
    }
}
/// Metadata about a log message.
///
/// # Use
///
/// `Metadata` structs are created when users of the library use
/// logging macros.
///
/// They are consumed by implementations of the `Log` trait in the
/// `enabled` method.
///
/// `Record`s use `Metadata` to determine the log message's severity
/// and target.
///
/// Users should use the `log_enabled!` macro in their code to avoid
/// constructing expensive log messages.
///
/// # Examples
///
/// ```rust
/// # #[macro_use]
/// # extern crate log;
/// #
/// use log::{Record, Level, Metadata};
///
/// struct MyLogger;
///
/// impl log::Log for MyLogger {
/// fn enabled(&self, metadata: &Metadata) -> bool {
/// metadata.level() <= Level::Info
/// }
///
/// fn log(&self, record: &Record) {
/// if self.enabled(record.metadata()) {
/// println!("{} - {}", record.level(), record.args());
/// }
/// }
/// }
///
/// # fn main(){}
/// ```
#[derive(Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
pub struct Metadata<'a> {
    // Severity of the message.
    level: Level,
    // Target string (defaults to the module path at the log call site).
    target: &'a str,
}
impl<'a> Metadata<'a> {
    // Read-only accessors; instances are built inside this crate
    // (see `__log` / `__enabled`).
    /// The verbosity level of the message.
    pub fn level(&self) -> Level {
        self.level
    }
    /// The name of the target of the directive.
    pub fn target(&self) -> &str {
        self.target
    }
}
/// A trait encapsulating the operations required of a logger
pub trait Log: Sync + Send {
    // `Sync + Send` because the installed logger is a global shared across
    // all threads.
    /// Determines if a log message with the specified metadata would be
    /// logged.
    ///
    /// This is used by the `log_enabled!` macro to allow callers to avoid
    /// expensive computation of log message arguments if the message would be
    /// discarded anyway.
    fn enabled(&self, metadata: &Metadata) -> bool;
    /// Logs the `Record`.
    ///
    /// Note that `enabled` is *not* necessarily called before this method.
    /// Implementations of `log` should perform all necessary filtering
    /// internally.
    fn log(&self, record: &Record);
}
// Just used as a dummy initial value for LOGGER
struct NopLogger;
impl Log for NopLogger {
    // Reports everything as disabled; all records are silently discarded.
    fn enabled(&self, _: &Metadata) -> bool {
        false
    }
    fn log(&self, _: &Record) {}
}
/// The location of a log message.
///
/// # Use
///
/// `Location` structs are created by the [`log!`] macro. They are attached to
/// [`Record`] structs, which are used by loggers to display logging messages.
/// `Location`s can be accessed using the [`location()`] method on [`Record`]s.
/// Log users do not need to directly use this struct.
///
/// ## Example
/// The below example shows a simple logger that prints the module path,
/// file name, and line number of the location the [`log!`] macro was called.
///
/// ```rust
/// # extern crate log;
/// struct SimpleLogger;
///
/// impl log::Log for SimpleLogger {
/// fn enabled(&self, metadata: &log::Metadata) -> bool {
/// true
/// }
///
/// fn log(&self, record: &log::Record) {
/// if !self.enabled(record.metadata()) {
/// return;
/// }
///
/// let location = record.location();
/// println!("{}:{}:{} -- {}",
/// location.module_path(),
/// location.file(),
/// location.line(),
/// record.args());
/// }
/// }
/// ```
///
/// # Warning
///
/// The fields of this struct are public so that they may be initialized by the
/// [`log!`] macro. They are subject to change at any time and should never be
/// accessed directly.
///
/// [`log!`]: macro.log.html
/// [`Record`]: struct.Record.html
/// [`location()`]: struct.Record.html#method.location
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Location {
    // Fields are `pub` + `doc(hidden)`: written directly by the `log!`
    // macro, never meant to be touched by users (see Warning above).
    #[doc(hidden)]
    pub __module_path: &'static str,
    #[doc(hidden)]
    pub __file: &'static str,
    #[doc(hidden)]
    pub __line: u32,
}
impl Location {
    // Thin accessors over the macro-initialized fields.
    /// The module path of the message.
    pub fn module_path(&self) -> &str {
        self.__module_path
    }
    /// The source file containing the message.
    pub fn file(&self) -> &str {
        self.__file
    }
    /// The line containing the message.
    pub fn line(&self) -> u32 {
        self.__line
    }
}
/// A token providing read and write access to the global maximum log level
/// filter.
///
/// The maximum log level is used as an optimization to avoid evaluating log
/// messages that will be ignored by the logger. Any message with a level
/// higher than the maximum log level filter will be ignored. A logger should
/// make sure to keep the maximum log level filter in sync with its current
/// configuration.
#[allow(missing_copy_implementations)]
// Unit-field token: only this crate can create one. It is handed to the
// `make_logger` closure by `set_logger` / `set_logger_raw`.
pub struct MaxLevelFilter(());
impl fmt::Debug for MaxLevelFilter {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "MaxLevelFilter")
    }
}
impl MaxLevelFilter {
    /// Gets the current maximum log level filter.
    pub fn get(&self) -> LevelFilter {
        max_level()
    }
    /// Sets the maximum log level.
    pub fn set(&self, level: LevelFilter) {
        // Stored as the discriminant; read back by `max_level()`.
        MAX_LOG_LEVEL_FILTER.store(level as usize, Ordering::SeqCst)
    }
}
/// Returns the current maximum log level.
///
/// The [`log!`], [`error!`], [`warn!`], [`info!`], [`debug!`], and [`trace!`] macros check
/// this value and discard any message logged at a higher level. The maximum
/// log level is set by the `MaxLevel` token passed to loggers.
///
/// [`log!`]: macro.log.html
/// [`error!`]: macro.error.html
/// [`warn!`]: macro.warn.html
/// [`info!`]: macro.info.html
/// [`debug!`]: macro.debug.html
/// [`trace!`]: macro.trace.html
#[inline(always)]
pub fn max_level() -> LevelFilter {
unsafe { mem::transmute(MAX_LOG_LEVEL_FILTER.load(Ordering::Relaxed)) }
}
/// Sets the global logger.
///
/// The `make_logger` closure is passed a `MaxLevelFilter` token, which the
/// logger should use to keep the global maximum log level in sync with the
/// highest log level that the logger will not ignore.
///
/// This function may only be called once in the lifetime of a program. Any log
/// events that occur before the call to `set_logger` completes will be
/// ignored.
///
/// This function does not typically need to be called manually. Logger
/// implementations should provide an initialization method that calls
/// `set_logger` internally.
///
/// Requires the `use_std` feature (enabled by default).
#[cfg(feature = "use_std")]
pub fn set_logger<M>(make_logger: M) -> Result<(), SetLoggerError>
    where M: FnOnce(MaxLevelFilter) -> Box<Log>
{
    // The boxed trait object is converted to a raw fat pointer (via
    // transmute) and intentionally leaked: it must stay alive for the rest
    // of the program, or until `shutdown_logger` reclaims it.
    unsafe { set_logger_raw(|max_level| mem::transmute(make_logger(max_level))) }
}
/// Sets the global logger from a raw pointer.
///
/// This function is similar to `set_logger` except that it is usable in
/// `no_std` code.
///
/// The `make_logger` closure is passed a `MaxLevelFilter` token, which the
/// logger should use to keep the global maximum log level in sync with the
/// highest log level that the logger will not ignore.
///
/// This function may only be called once in the lifetime of a program. Any log
/// events that occur before the call to `set_logger_raw` completes will be
/// ignored.
///
/// This function does not typically need to be called manually. Logger
/// implementations should provide an initialization method that calls
/// `set_logger_raw` internally.
///
/// # Safety
///
/// The pointer returned by `make_logger` must remain valid for the entire
/// duration of the program or until `shutdown_logger_raw` is called. In
/// addition, `shutdown_logger` *must not* be called after this function.
pub unsafe fn set_logger_raw<M>(make_logger: M) -> Result<(), SetLoggerError>
    where M: FnOnce(MaxLevelFilter) -> *const Log
{
    // Claim the one-shot initialization slot; losing the race (or any
    // second call) is an error.
    if STATE.compare_and_swap(UNINITIALIZED, INITIALIZING, Ordering::SeqCst) != UNINITIALIZED {
        return Err(SetLoggerError(()));
    }
    LOGGER = make_logger(MaxLevelFilter(()));
    // Publish only after LOGGER is written; readers (`logger()`) check
    // STATE before dereferencing LOGGER.
    STATE.store(INITIALIZED, Ordering::SeqCst);
    Ok(())
}
/// Shuts down the global logger.
///
/// This function may only be called once in the lifetime of a program, and may
/// not be called before `set_logger`. Once the global logger has been shut
/// down, it can no longer be re-initialized by `set_logger`. Any log events
/// that occur after the call to `shutdown_logger` completes will be ignored.
///
/// The logger that was originally created by the call to to `set_logger` is
/// returned on success. At that point it is guaranteed that no other threads
/// are concurrently accessing the logger object.
#[cfg(feature = "use_std")]
pub fn shutdown_logger() -> Result<Box<Log>, ShutdownLoggerError> {
    // Rebuild the Box that `set_logger` leaked (transmute of the raw fat
    // pointer back into Box<Log>), handing ownership to the caller.
    shutdown_logger_raw().map(|l| unsafe { mem::transmute(l) })
}
/// Shuts down the global logger.
///
/// This function is similar to `shutdown_logger` except that it is usable in
/// `no_std` code.
///
/// This function may only be called once in the lifetime of a program, and may
/// not be called before `set_logger_raw`. Once the global logger has been shut
/// down, it can no longer be re-initialized by `set_logger_raw`. Any log
/// events that occur after the call to `shutdown_logger_raw` completes will be
/// ignored.
///
/// The pointer that was originally passed to `set_logger_raw` is returned on
/// success. At that point it is guaranteed that no other threads are
/// concurrently accessing the logger object.
pub fn shutdown_logger_raw() -> Result<*const Log, ShutdownLoggerError> {
    // Set the global log level to stop other thread from logging
    MAX_LOG_LEVEL_FILTER.store(0, Ordering::SeqCst);
    // Set to INITIALIZING to prevent re-initialization after
    if STATE.compare_and_swap(INITIALIZED, INITIALIZING, Ordering::SeqCst) != INITIALIZED {
        return Err(ShutdownLoggerError(()));
    }
    // Spin until every in-flight LoggerGuard has dropped, so no thread can
    // still be using the logger when we hand the pointer back.
    while REFCOUNT.load(Ordering::SeqCst) != 0 {
        // FIXME add a sleep here when it doesn't involve timers
    }
    unsafe {
        let logger = LOGGER;
        // Swap the no-op logger back in before releasing the old pointer.
        LOGGER = &NopLogger;
        Ok(logger)
    }
}
/// The type returned by [`set_logger`] if [`set_logger`] has already been called.
/// [`set_logger`]: fn.set_logger.html
#[allow(missing_copy_implementations)]
#[derive(Debug)]
// The private unit field keeps construction inside this crate.
pub struct SetLoggerError(());
impl fmt::Display for SetLoggerError {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        // Same message as the `Error::description` impl below.
        fmt.write_str(SET_LOGGER_ERROR)
    }
}
// The Error trait is not available in libcore
#[cfg(feature = "use_std")]
impl error::Error for SetLoggerError {
    fn description(&self) -> &str {
        SET_LOGGER_ERROR
    }
}
/// The type returned by [`shutdown_logger_raw`] if [`shutdown_logger_raw`] has
/// already been called or if [`set_logger_raw`] has not been called yet.
///
/// [`set_logger_raw`]: fn.set_logger_raw.html
/// [`shutdown_logger_raw`]: fn.shutdown_logger_raw.html
// The private `()` field keeps construction internal to this crate.
#[allow(missing_copy_implementations)]
#[derive(Debug)]
pub struct ShutdownLoggerError(());
impl fmt::Display for ShutdownLoggerError {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.write_str(SHUTDOWN_LOGGER_ERROR)
    }
}
// The Error trait is not available in libcore
#[cfg(feature = "use_std")]
impl error::Error for ShutdownLoggerError {
    fn description(&self) -> &str {
        SHUTDOWN_LOGGER_ERROR
    }
}
/// The type returned by `from_str` when the string doesn't match any of the log levels.
// The private `()` field keeps construction internal to this crate; `PartialEq`
// is derived so the parse tests below can compare `Err` values directly.
#[allow(missing_copy_implementations)]
#[derive(Debug, PartialEq)]
pub struct ParseLevelError(());
impl fmt::Display for ParseLevelError {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.write_str(LEVEL_PARSE_ERROR)
    }
}
// The Error trait is not available in libcore
#[cfg(feature = "use_std")]
impl error::Error for ParseLevelError {
    fn description(&self) -> &str {
        LEVEL_PARSE_ERROR
    }
}
/// Deprecated
///
/// Use https://crates.io/crates/log-panics instead.
// Installs a process-wide panic hook that reports panics through the `error!`
// macro (see the `panic` module below for the hook body).
#[cfg(all(feature = "nightly", feature = "use_std"))]
pub fn log_panics() {
    std::panic::set_hook(Box::new(panic::log));
}
// inner module so that the reporting module is log::panic instead of log
#[cfg(all(feature = "nightly", feature = "use_std"))]
mod panic {
    use std::panic::PanicInfo;
    use std::thread;

    /// Panic-hook body: logs the panicking thread's name, the panic message,
    /// and (when available) the source location, at `error!` level.
    pub fn log(info: &PanicInfo) {
        let current = thread::current();
        let thread = current.name().unwrap_or("<unnamed>");
        // A panic payload is a `&'static str` for `panic!("literal")` or a
        // `String` for `panic!("{}", ..)`; anything else gets a placeholder.
        let payload = info.payload();
        let msg: &str = if let Some(s) = payload.downcast_ref::<&'static str>() {
            s
        } else if let Some(s) = payload.downcast_ref::<String>() {
            &s[..]
        } else {
            "Box<Any>"
        };
        if let Some(location) = info.location() {
            error!("thread '{}' panicked at '{}': {}:{}",
                   thread,
                   msg,
                   location.file(),
                   location.line())
        } else {
            error!("thread '{}' panicked at '{}'", thread, msg)
        }
    }
}
// RAII guard that keeps the global logger alive while a log call is using it.
// Guards are only handed out by `logger()` below, which increments REFCOUNT
// before constructing one; dropping the guard releases that count.
struct LoggerGuard(&'static Log);
impl Drop for LoggerGuard {
    fn drop(&mut self) {
        // Release the reference taken in `logger()`; `shutdown_logger_raw`
        // spins until this count drains to zero.
        REFCOUNT.fetch_sub(1, Ordering::SeqCst);
    }
}
impl Deref for LoggerGuard {
    type Target = Log;
    fn deref(&self) -> &(Log + 'static) {
        self.0
    }
}
// Returns a guard around the global logger, or `None` if no logger is
// currently installed.
//
// REFCOUNT is incremented *before* STATE is checked: if the check fails the
// increment is undone immediately, and if it succeeds the shutdown path (which
// flips STATE first, then waits for REFCOUNT to drain) cannot complete while
// this guard is outstanding.
fn logger() -> Option<LoggerGuard> {
    REFCOUNT.fetch_add(1, Ordering::SeqCst);
    if STATE.load(Ordering::SeqCst) != INITIALIZED {
        REFCOUNT.fetch_sub(1, Ordering::SeqCst);
        None
    } else {
        Some(LoggerGuard(unsafe { &*LOGGER }))
    }
}
// WARNING
// This is not considered part of the crate's public API. It is subject to
// change at any time.
#[doc(hidden)]
pub fn __enabled(level: Level, target: &str) -> bool {
    // Nothing is enabled when no logger is installed; otherwise defer to it.
    logger().map_or(false, |logger| {
        logger.enabled(&Metadata {
            level: level,
            target: target,
        })
    })
}
// WARNING
// This is not considered part of the crate's public API. It is subject to
// change at any time.
#[doc(hidden)]
pub fn __log(level: Level, target: &str, loc: &Location, args: fmt::Arguments) {
    // The record is silently dropped when no logger is installed.
    if let Some(logger) = logger() {
        logger.log(&Record {
            metadata: Metadata {
                level: level,
                target: target,
            },
            location: loc,
            args: args,
        })
    }
}
// WARNING
// This is not considered part of the crate's public API. It is subject to
// change at any time.
#[inline(always)]
#[doc(hidden)]
pub fn __static_max_level() -> LevelFilter {
    // In release builds the `release_max_level_*` features take precedence;
    // when none of them is enabled (or in a debug build) the unconditional
    // `max_level_*` features apply, defaulting to `Trace`. All branches are
    // resolved at compile time via `cfg!`.
    let release = !cfg!(debug_assertions);
    if release && cfg!(feature = "release_max_level_off") {
        LevelFilter::Off
    } else if release && cfg!(feature = "release_max_level_error") {
        LevelFilter::Error
    } else if release && cfg!(feature = "release_max_level_warn") {
        LevelFilter::Warn
    } else if release && cfg!(feature = "release_max_level_info") {
        LevelFilter::Info
    } else if release && cfg!(feature = "release_max_level_debug") {
        LevelFilter::Debug
    } else if release && cfg!(feature = "release_max_level_trace") {
        LevelFilter::Trace
    } else if cfg!(feature = "max_level_off") {
        LevelFilter::Off
    } else if cfg!(feature = "max_level_error") {
        LevelFilter::Error
    } else if cfg!(feature = "max_level_warn") {
        LevelFilter::Warn
    } else if cfg!(feature = "max_level_info") {
        LevelFilter::Info
    } else if cfg!(feature = "max_level_debug") {
        LevelFilter::Debug
    } else {
        LevelFilter::Trace
    }
}
// Unit tests covering level parsing, Display formatting, and the cross-type
// comparison/equality impls between `Level` and `LevelFilter`.
#[cfg(test)]
mod tests {
    extern crate std;
    use tests::std::string::ToString;
    use super::{Level, LevelFilter, ParseLevelError};

    // `LevelFilter` parses all six names, case-insensitively.
    #[test]
    fn test_levelfilter_from_str() {
        let tests = [("off", Ok(LevelFilter::Off)),
                     ("error", Ok(LevelFilter::Error)),
                     ("warn", Ok(LevelFilter::Warn)),
                     ("info", Ok(LevelFilter::Info)),
                     ("debug", Ok(LevelFilter::Debug)),
                     ("trace", Ok(LevelFilter::Trace)),
                     ("OFF", Ok(LevelFilter::Off)),
                     ("ERROR", Ok(LevelFilter::Error)),
                     ("WARN", Ok(LevelFilter::Warn)),
                     ("INFO", Ok(LevelFilter::Info)),
                     ("DEBUG", Ok(LevelFilter::Debug)),
                     ("TRACE", Ok(LevelFilter::Trace)),
                     ("asdf", Err(ParseLevelError(())))];
        for &(s, ref expected) in &tests {
            assert_eq!(expected, &s.parse());
        }
    }

    // `Level` has no "off" variant, so "OFF" must fail to parse.
    #[test]
    fn test_level_from_str() {
        let tests = [("OFF", Err(ParseLevelError(()))),
                     ("error", Ok(Level::Error)),
                     ("warn", Ok(Level::Warn)),
                     ("info", Ok(Level::Info)),
                     ("debug", Ok(Level::Debug)),
                     ("trace", Ok(Level::Trace)),
                     ("ERROR", Ok(Level::Error)),
                     ("WARN", Ok(Level::Warn)),
                     ("INFO", Ok(Level::Info)),
                     ("DEBUG", Ok(Level::Debug)),
                     ("TRACE", Ok(Level::Trace)),
                     ("asdf", Err(ParseLevelError(())))];
        for &(s, ref expected) in &tests {
            assert_eq!(expected, &s.parse());
        }
    }

    // Display renders uppercase level names.
    #[test]
    fn test_level_show() {
        assert_eq!("INFO", Level::Info.to_string());
        assert_eq!("ERROR", Level::Error.to_string());
    }

    #[test]
    fn test_levelfilter_show() {
        assert_eq!("OFF", LevelFilter::Off.to_string());
        assert_eq!("ERROR", LevelFilter::Error.to_string());
    }

    // `Level` and `LevelFilter` are mutually ordered.
    #[test]
    fn test_cross_cmp() {
        assert!(Level::Debug > LevelFilter::Error);
        assert!(LevelFilter::Warn < Level::Trace);
        assert!(LevelFilter::Off < Level::Error);
    }

    // ... and mutually comparable for equality.
    #[test]
    fn test_cross_eq() {
        assert!(Level::Error == LevelFilter::Error);
        assert!(LevelFilter::Off != Level::Error);
        assert!(Level::Trace == LevelFilter::Trace);
    }

    // `LevelFilter::Off` is the only filter with no `Level` counterpart.
    #[test]
    fn test_to_level() {
        assert_eq!(Some(Level::Error), LevelFilter::Error.to_level());
        assert_eq!(None, LevelFilter::Off.to_level());
        assert_eq!(Some(Level::Debug), LevelFilter::Debug.to_level());
    }

    #[test]
    fn test_to_level_filter() {
        assert_eq!(LevelFilter::Error, Level::Error.to_level_filter());
        assert_eq!(LevelFilter::Trace, Level::Trace.to_level_filter());
    }

    #[test]
    #[cfg(feature = "use_std")]
    fn test_error_trait() {
        use std::error::Error;
        use super::SetLoggerError;
        let e = SetLoggerError(());
        assert_eq!(e.description(),
                   "attempted to set a logger after the logging system \
                    was already initialized");
    }
}
|
//! A crate that provides support for the half-precision floating point type.
//!
//! This crate provides the `f16` type, which is an implementation of the IEEE 754-2008 `binary16`
//! floating point type. This 'half' precision floating point type is intended for efficient storage
//! where the full range and precision of a larger floating point value is not required. This is
//! especially useful for image storage formats.
//!
//! Because `f16` is primarily for efficient storage, floating point operations are not implemented.
//! Operations should be performed with `f32` or higher-precision types and converted to/from `f16`
//! as necessary.
//!
//! Some hardware architectures provide support for 16-bit floating point conversions. Enable the
//! `use-intrinsics` feature to use LLVM intrinsics for hardware conversions. This crate does no
//! checks on whether the hardware supports the feature. This feature currently only works on
//! nightly Rust due to a compiler feature gate.
//!
//! Support for `serde` crate `Serialize` and `Deserialize` traits is provided when the `serde`
//! feature is enabled. This adds a dependency on `serde` crate so is an optional feature that works
//! on Rust 1.15 or newer.
//!
//! The crate uses `#[no_std]` by default, so can be used in embedded environments without using the
//! Rust `std` library. A `std` feature is available, which enables additional utilities using the
//! `std` library, such as the `vec` module that provides zero-copy `Vec` conversions.
#![warn(
missing_docs, missing_copy_implementations, missing_debug_implementations, trivial_casts,
trivial_numeric_casts, unused_extern_crates, unused_import_braces
)]
#![cfg_attr(not(feature = "std"), no_std)]
#![cfg_attr(feature = "use-intrinsics", feature(link_llvm_intrinsics))]
#[cfg(feature = "serde")]
#[macro_use]
extern crate serde;
#[cfg(feature = "std")]
extern crate core;
use core::cmp::Ordering;
use core::fmt::{Debug, Display, Error, Formatter, LowerExp, UpperExp};
use core::num::{FpCategory, ParseFloatError};
use core::str::FromStr;
/// The 16-bit floating point type.
///
/// A newtype over the raw IEEE 754-2008 `binary16` bit pattern stored in a
/// `u16`. No arithmetic is implemented on `f16` itself; convert to `f32` or
/// `f64` for computation (see the crate-level docs).
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Default)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct f16(u16);
pub mod consts {
    //! Useful `f16` constants.
    //!
    //! Each constant is spelled as its raw `binary16` bit pattern, since no
    //! `const` float-to-half conversion is available.
    use super::f16;
    /// 16-bit equivalent of `std::f32::DIGITS`
    pub const DIGITS: u32 = 3;
    /// 16-bit floating point epsilon. `9.7656e-4`
    pub const EPSILON: f16 = f16(0x1400u16);
    /// 16-bit positive infinity.
    pub const INFINITY: f16 = f16(0x7C00u16);
    /// 16-bit equivalent of `std::f32::MANTISSA_DIGITS`
    pub const MANTISSA_DIGITS: u32 = 11;
    /// Largest finite `f16` value. `65504`
    pub const MAX: f16 = f16(0x7BFF);
    /// 16-bit equivalent of `std::f32::MAX_10_EXP`
    pub const MAX_10_EXP: i32 = 5;
    /// 16-bit equivalent of `std::f32::MAX_EXP`
    pub const MAX_EXP: i32 = 16;
    /// Smallest finite `f16` value. `-65504`
    pub const MIN: f16 = f16(0xFBFF);
    /// 16-bit equivalent of `std::f32::MIN_10_EXP`
    pub const MIN_10_EXP: i32 = -4;
    /// 16-bit equivalent of `std::f32::MIN_EXP`
    pub const MIN_EXP: i32 = -13;
    /// Smallest positive, normalized `f16` value. Approx. `6.10352e-5`
    pub const MIN_POSITIVE: f16 = f16(0x0400u16);
    /// 16-bit NaN.
    pub const NAN: f16 = f16(0x7E00u16);
    /// 16-bit negative infinity.
    pub const NEG_INFINITY: f16 = f16(0xFC00u16);
    /// 16-bit equivalent of `std::f32::RADIX`
    pub const RADIX: u32 = 2;
    /// 16-bit minimum positive subnormal value. Approx. `5.96046e-8`
    pub const MIN_POSITIVE_SUBNORMAL: f16 = f16(0x0001u16);
    /// 16-bit maximum subnormal value. Approx. `6.09756e-5`
    pub const MAX_SUBNORMAL: f16 = f16(0x03FFu16);
    /// 16-bit floating point `1.0`
    pub const ONE: f16 = f16(0x3C00u16);
    /// 16-bit floating point `0.0`
    pub const ZERO: f16 = f16(0x0000u16);
    /// 16-bit floating point `-0.0`
    pub const NEG_ZERO: f16 = f16(0x8000u16);
    /// Euler's number.
    pub const E: f16 = f16(0x4170u16);
    /// Archimedes' constant.
    pub const PI: f16 = f16(0x4248u16);
    /// 1.0/pi
    pub const FRAC_1_PI: f16 = f16(0x3518u16);
    /// 1.0/sqrt(2.0)
    pub const FRAC_1_SQRT_2: f16 = f16(0x39A8u16);
    /// 2.0/pi
    pub const FRAC_2_PI: f16 = f16(0x3918u16);
    /// 2.0/sqrt(pi)
    pub const FRAC_2_SQRT_PI: f16 = f16(0x3C83u16);
    /// pi/2.0
    pub const FRAC_PI_2: f16 = f16(0x3E48u16);
    /// pi/3.0
    pub const FRAC_PI_3: f16 = f16(0x3C30u16);
    /// pi/4.0
    pub const FRAC_PI_4: f16 = f16(0x3A48u16);
    /// pi/6.0
    pub const FRAC_PI_6: f16 = f16(0x3830u16);
    /// pi/8.0
    pub const FRAC_PI_8: f16 = f16(0x3648u16);
    /// ln(10.0)
    pub const LN_10: f16 = f16(0x409Bu16);
    /// ln(2.0)
    pub const LN_2: f16 = f16(0x398Cu16);
    /// log10(e)
    pub const LOG10_E: f16 = f16(0x36F3u16);
    /// log2(e)
    pub const LOG2_E: f16 = f16(0x3DC5u16);
    /// sqrt(2)
    pub const SQRT_2: f16 = f16(0x3DA8u16);
}
impl f16 {
    /// Constructs a 16-bit floating point value from the raw bits.
    #[inline]
    pub fn from_bits(bits: u16) -> f16 {
        f16(bits)
    }
    /// Constructs a 16-bit floating point value from a 32-bit floating point value.
    ///
    /// If the 32-bit value is too large to fit in 16-bits, +/- infinity will result. NaN values are
    /// preserved. 32-bit subnormal values are too tiny to be represented in 16-bits and result in
    /// +/- 0. Exponents that underflow the minimum 16-bit exponent will result in 16-bit subnormals
    /// or +/- 0. All other values are truncated and rounded to the nearest representable 16-bit
    /// value.
    #[inline]
    pub fn from_f32(value: f32) -> f16 {
        f16(convert::f32_to_f16(value))
    }
    /// Constructs a 16-bit floating point value from a 64-bit floating point value.
    ///
    /// If the 64-bit value is too large to fit in 16-bits, +/- infinity will result. NaN values are
    /// preserved. 64-bit subnormal values are too tiny to be represented in 16-bits and result in
    /// +/- 0. Exponents that underflow the minimum 16-bit exponent will result in 16-bit subnormals
    /// or +/- 0. All other values are truncated and rounded to the nearest representable 16-bit
    /// value.
    #[inline]
    pub fn from_f64(value: f64) -> f16 {
        f16(convert::f64_to_f16(value))
    }
    /// Converts an `f16` into the underlying bit representation.
    #[inline]
    pub fn to_bits(self) -> u16 {
        self.0
    }
    /// Converts an `f16` into the underlying bit representation.
    #[deprecated(since = "1.2.0", note = "renamed to to_bits")]
    #[inline]
    pub fn as_bits(self) -> u16 {
        self.to_bits()
    }
    /// Converts an `f16` value into an `f32` value.
    ///
    /// This conversion is lossless as all 16-bit floating point values can be represented exactly
    /// in 32-bit floating point.
    #[inline]
    pub fn to_f32(self) -> f32 {
        convert::f16_to_f32(self.0)
    }
    /// Converts an `f16` value into an `f64` value.
    ///
    /// This conversion is lossless as all 16-bit floating point values can be represented exactly
    /// in 64-bit floating point.
    #[inline]
    pub fn to_f64(self) -> f64 {
        convert::f16_to_f64(self.0)
    }
    /// Returns `true` if this value is `NaN` and `false` otherwise.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use half::f16;
    ///
    /// let nan = half::consts::NAN;
    /// let f = f16::from_f32(7.0_f32);
    ///
    /// assert!(nan.is_nan());
    /// assert!(!f.is_nan());
    /// ```
    #[inline]
    pub fn is_nan(self) -> bool {
        // NaN: with the sign bit masked off, the exponent is all ones and the
        // mantissa is non-zero (i.e. the pattern exceeds the infinity pattern).
        self.0 & 0x7FFFu16 > 0x7C00u16
    }
    /// Returns `true` if this value is positive infinity or negative infinity and `false`
    /// otherwise.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use half::f16;
    ///
    /// let f = f16::from_f32(7.0f32);
    /// let inf = half::consts::INFINITY;
    /// let neg_inf = half::consts::NEG_INFINITY;
    /// let nan = half::consts::NAN;
    ///
    /// assert!(!f.is_infinite());
    /// assert!(!nan.is_infinite());
    ///
    /// assert!(inf.is_infinite());
    /// assert!(neg_inf.is_infinite());
    /// ```
    #[inline]
    pub fn is_infinite(self) -> bool {
        // Infinity: exponent all ones, mantissa exactly zero (sign ignored).
        self.0 & 0x7FFFu16 == 0x7C00u16
    }
    /// Returns `true` if this number is neither infinite nor `NaN`.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use half::f16;
    ///
    /// let f = f16::from_f32(7.0f32);
    /// let inf = half::consts::INFINITY;
    /// let neg_inf = half::consts::NEG_INFINITY;
    /// let nan = half::consts::NAN;
    ///
    /// assert!(f.is_finite());
    ///
    /// assert!(!nan.is_finite());
    /// assert!(!inf.is_finite());
    /// assert!(!neg_inf.is_finite());
    /// ```
    #[inline]
    pub fn is_finite(self) -> bool {
        // Finite: the exponent field is not all ones.
        self.0 & 0x7C00u16 != 0x7C00u16
    }
    /// Returns `true` if the number is neither zero, infinite, subnormal, nor `NaN`.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use half::f16;
    ///
    /// let min = half::consts::MIN_POSITIVE;
    /// let max = half::consts::MAX;
    /// let lower_than_min = f16::from_f32(1.0e-10_f32);
    /// let zero = f16::from_f32(0.0_f32);
    ///
    /// assert!(min.is_normal());
    /// assert!(max.is_normal());
    ///
    /// assert!(!zero.is_normal());
    /// assert!(!half::consts::NAN.is_normal());
    /// assert!(!half::consts::INFINITY.is_normal());
    /// // Values between `0` and `min` are Subnormal.
    /// assert!(!lower_than_min.is_normal());
    /// ```
    #[inline]
    pub fn is_normal(self) -> bool {
        // Normal: exponent field is neither all ones (inf/NaN) nor all zeros
        // (zero/subnormal).
        let exp = self.0 & 0x7C00u16;
        exp != 0x7C00u16 && exp != 0
    }
    /// Returns the floating point category of the number.
    ///
    /// If only one property is going to be tested, it is generally faster to use the specific
    /// predicate instead.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::num::FpCategory;
    /// use half::f16;
    ///
    /// let num = f16::from_f32(12.4_f32);
    /// let inf = half::consts::INFINITY;
    ///
    /// assert_eq!(num.classify(), FpCategory::Normal);
    /// assert_eq!(inf.classify(), FpCategory::Infinite);
    /// ```
    pub fn classify(self) -> FpCategory {
        // Split into exponent and mantissa fields; the four categories are
        // distinguished by whether each field is zero / all-ones.
        let exp = self.0 & 0x7C00u16;
        let man = self.0 & 0x03FFu16;
        if exp == 0 {
            if man == 0 {
                FpCategory::Zero
            } else {
                FpCategory::Subnormal
            }
        } else if exp == 0x7C00u16 {
            if man == 0 {
                FpCategory::Infinite
            } else {
                FpCategory::Nan
            }
        } else {
            FpCategory::Normal
        }
    }
    /// Returns a number that represents the sign of `self`.
    ///
    /// * `1.0` if the number is positive, `+0.0` or `INFINITY`
    /// * `-1.0` if the number is negative, `-0.0` or `NEG_INFINITY`
    /// * `NAN` if the number is `NAN`
    ///
    /// # Examples
    ///
    /// ```rust
    /// use half::f16;
    ///
    /// let f = f16::from_f32(3.5_f32);
    ///
    /// assert_eq!(f.signum(), f16::from_f32(1.0));
    /// assert_eq!(half::consts::NEG_INFINITY.signum(), f16::from_f32(-1.0));
    ///
    /// assert!(half::consts::NAN.signum().is_nan());
    /// ```
    pub fn signum(self) -> f16 {
        if self.is_nan() {
            self
        } else if self.0 & 0x8000u16 != 0 {
            // Sign bit set: negative (includes -0.0 and NEG_INFINITY).
            f16::from_f32(-1.0)
        } else {
            f16::from_f32(1.0)
        }
    }
    /// Returns `true` if and only if `self` has a positive sign, including `+0.0`, `NaNs` with
    /// positive sign bit and positive infinity.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use half::f16;
    ///
    /// let nan = half::consts::NAN;
    /// let f = f16::from_f32(7.0_f32);
    /// let g = f16::from_f32(-7.0_f32);
    ///
    /// assert!(f.is_sign_positive());
    /// assert!(!g.is_sign_positive());
    /// // `NaN` can be either positive or negative
    /// assert!(nan.is_sign_positive() != nan.is_sign_negative());
    /// ```
    #[inline]
    pub fn is_sign_positive(self) -> bool {
        self.0 & 0x8000u16 == 0
    }
    /// Returns `true` if and only if `self` has a negative sign, including `-0.0`, `NaNs` with
    /// negative sign bit and negative infinity.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use half::f16;
    ///
    /// let nan = half::consts::NAN;
    /// let f = f16::from_f32(7.0f32);
    /// let g = f16::from_f32(-7.0f32);
    ///
    /// assert!(!f.is_sign_negative());
    /// assert!(g.is_sign_negative());
    /// // `NaN` can be either positive or negative
    /// assert!(nan.is_sign_positive() != nan.is_sign_negative());
    /// ```
    #[inline]
    pub fn is_sign_negative(self) -> bool {
        self.0 & 0x8000u16 != 0
    }
}
impl From<f16> for f32 {
    // Lossless widening conversion (see `to_f32`).
    fn from(x: f16) -> f32 {
        x.to_f32()
    }
}
impl From<f16> for f64 {
    // Lossless widening conversion (see `to_f64`).
    fn from(x: f16) -> f64 {
        x.to_f64()
    }
}
impl From<i8> for f16 {
    // Exact: the full i8 range fits within f16's 11-bit significand
    // (MANTISSA_DIGITS in `consts`).
    fn from(x: i8) -> f16 {
        // Convert to f32, then to f16
        f16::from_f32(f32::from(x))
    }
}
impl From<u8> for f16 {
    // Exact: the full u8 range fits within f16's 11-bit significand.
    fn from(x: u8) -> f16 {
        // Convert to f32, then to f16
        f16::from_f32(f32::from(x))
    }
}
impl PartialEq for f16 {
    /// Bit-level equality with IEEE semantics: NaN is unequal to everything
    /// (including itself) and `+0.0 == -0.0`.
    fn eq(&self, other: &f16) -> bool {
        if self.is_nan() || other.is_nan() {
            return false;
        }
        // Equal when the bit patterns match, or when both are zeros — the OR
        // of two zero patterns has no bits set outside the sign bit.
        self.0 == other.0 || (self.0 | other.0) & 0x7FFFu16 == 0
    }
}
impl PartialOrd for f16 {
    // Ordering is computed directly on the sign-magnitude bit patterns:
    // - NaN on either side yields None (and false for lt/le/gt/ge),
    // - +0.0 and -0.0 compare equal (their OR has no bits besides the sign),
    // - two non-negative values order like their raw bit patterns,
    // - two negative values order like their raw bit patterns reversed.
    fn partial_cmp(&self, other: &f16) -> Option<Ordering> {
        if self.is_nan() || other.is_nan() {
            None
        } else {
            let neg = self.0 & 0x8000u16 != 0;
            let other_neg = other.0 & 0x8000u16 != 0;
            match (neg, other_neg) {
                (false, false) => Some(self.0.cmp(&other.0)),
                (false, true) => {
                    // Positive vs negative: equal only when both are zeros.
                    if (self.0 | other.0) & 0x7FFFu16 == 0 {
                        Some(Ordering::Equal)
                    } else {
                        Some(Ordering::Greater)
                    }
                }
                (true, false) => {
                    // Negative vs positive: equal only when both are zeros.
                    if (self.0 | other.0) & 0x7FFFu16 == 0 {
                        Some(Ordering::Equal)
                    } else {
                        Some(Ordering::Less)
                    }
                }
                // Both negative: larger magnitude means smaller value.
                (true, true) => Some(other.0.cmp(&self.0)),
            }
        }
    }
    // The specialized comparisons below mirror `partial_cmp` without building
    // an `Ordering`; all of them return false when either operand is NaN.
    fn lt(&self, other: &f16) -> bool {
        if self.is_nan() || other.is_nan() {
            false
        } else {
            let neg = self.0 & 0x8000u16 != 0;
            let other_neg = other.0 & 0x8000u16 != 0;
            match (neg, other_neg) {
                (false, false) => self.0 < other.0,
                (false, true) => false,
                // Negative < positive unless both are zeros.
                (true, false) => (self.0 | other.0) & 0x7FFFu16 != 0,
                (true, true) => self.0 > other.0,
            }
        }
    }
    fn le(&self, other: &f16) -> bool {
        if self.is_nan() || other.is_nan() {
            false
        } else {
            let neg = self.0 & 0x8000u16 != 0;
            let other_neg = other.0 & 0x8000u16 != 0;
            match (neg, other_neg) {
                (false, false) => self.0 <= other.0,
                // Positive <= negative only when both are zeros.
                (false, true) => (self.0 | other.0) & 0x7FFFu16 == 0,
                (true, false) => true,
                (true, true) => self.0 >= other.0,
            }
        }
    }
    fn gt(&self, other: &f16) -> bool {
        if self.is_nan() || other.is_nan() {
            false
        } else {
            let neg = self.0 & 0x8000u16 != 0;
            let other_neg = other.0 & 0x8000u16 != 0;
            match (neg, other_neg) {
                (false, false) => self.0 > other.0,
                // Positive > negative unless both are zeros.
                (false, true) => (self.0 | other.0) & 0x7FFFu16 != 0,
                (true, false) => false,
                (true, true) => self.0 < other.0,
            }
        }
    }
    fn ge(&self, other: &f16) -> bool {
        if self.is_nan() || other.is_nan() {
            false
        } else {
            let neg = self.0 & 0x8000u16 != 0;
            let other_neg = other.0 & 0x8000u16 != 0;
            match (neg, other_neg) {
                (false, false) => self.0 >= other.0,
                (false, true) => true,
                // Negative >= positive only when both are zeros.
                (true, false) => (self.0 | other.0) & 0x7FFFu16 == 0,
                (true, true) => self.0 <= other.0,
            }
        }
    }
}
impl FromStr for f16 {
    type Err = ParseFloatError;

    /// Parses the string as an `f32` and narrows the result to half precision.
    fn from_str(src: &str) -> Result<f16, ParseFloatError> {
        let wide = f32::from_str(src)?;
        Ok(f16::from_f32(wide))
    }
}
impl Debug for f16 {
    // Debug shows the raw bit pattern in hexadecimal, not a decimal value.
    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
        write!(f, "0x{:X}", self.0)
    }
}
impl Display for f16 {
    // Display formats the numeric value by widening to f32 first.
    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
        write!(f, "{}", self.to_f32())
    }
}
impl LowerExp for f16 {
    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
        write!(f, "{:e}", self.to_f32())
    }
}
impl UpperExp for f16 {
    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
        write!(f, "{:E}", self.to_f32())
    }
}
// Hardware-accelerated conversions via LLVM intrinsics. Per the crate docs,
// no runtime check is performed: enabling `use-intrinsics` asserts that the
// target actually supports these operations.
#[cfg(feature = "use-intrinsics")]
mod convert {
    extern "C" {
        #[link_name = "llvm.convert.to.fp16.f32"]
        fn convert_to_fp16_f32(f: f32) -> u16;
        #[link_name = "llvm.convert.to.fp16.f64"]
        fn convert_to_fp16_f64(f: f64) -> u16;
        #[link_name = "llvm.convert.from.fp16.f32"]
        fn convert_from_fp16_f32(i: u16) -> f32;
        #[link_name = "llvm.convert.from.fp16.f64"]
        fn convert_from_fp16_f64(i: u16) -> f64;
    }
    // Each wrapper is unsafe only because the intrinsic is an extern
    // declaration; the calls take and return plain values.
    #[inline(always)]
    pub fn f32_to_f16(f: f32) -> u16 {
        unsafe { convert_to_fp16_f32(f) }
    }
    #[inline(always)]
    pub fn f64_to_f16(f: f64) -> u16 {
        unsafe { convert_to_fp16_f64(f) }
    }
    #[inline(always)]
    pub fn f16_to_f32(i: u16) -> f32 {
        unsafe { convert_from_fp16_f32(i) }
    }
    #[inline(always)]
    pub fn f16_to_f64(i: u16) -> f64 {
        unsafe { convert_from_fp16_f64(i) }
    }
}
// Portable software conversions, used when the `use-intrinsics` feature is
// disabled. All routines operate directly on the IEEE 754 bit patterns.
#[cfg(not(feature = "use-intrinsics"))]
mod convert {
    use core;
    use core::mem;
    pub fn f32_to_f16(value: f32) -> u16 {
        // Convert to raw bytes
        let x: u32 = unsafe { mem::transmute(value) };
        // Check for signed zero
        if x & 0x7FFFFFFFu32 == 0 {
            return (x >> 16) as u16;
        }
        // Extract IEEE754 components
        let sign = x & 0x80000000u32;
        let exp = x & 0x7F800000u32;
        let man = x & 0x007FFFFFu32;
        // Subnormals will underflow, so return signed zero
        if exp == 0 {
            return (sign >> 16) as u16;
        }
        // Check for all exponent bits being set, which is Infinity or NaN
        if exp == 0x7F800000u32 {
            // A mantissa of zero is a signed Infinity
            if man == 0 {
                return ((sign >> 16) | 0x7C00u32) as u16;
            }
            // Otherwise, this is NaN (payload bits are discarded, quiet bit set)
            return ((sign >> 16) | 0x7E00u32) as u16;
        }
        // The number is normalized, start assembling half precision version
        let half_sign = sign >> 16;
        // Unbias the exponent, then bias for half precision
        let unbiased_exp = ((exp >> 23) as i32) - 127;
        let half_exp = unbiased_exp + 15;
        // Check for exponent overflow, return +infinity
        if half_exp >= 0x1F {
            return (half_sign | 0x7C00u32) as u16;
        }
        // Check for underflow
        if half_exp <= 0 {
            // Check mantissa for what we can do
            if 14 - half_exp > 24 {
                // No rounding possibility, so this is a full underflow, return signed zero
                return half_sign as u16;
            }
            // Don't forget about hidden leading mantissa bit when assembling mantissa
            let man = man | 0x00800000u32;
            let mut half_man = man >> (14 - half_exp);
            // Check for rounding: only the highest discarded bit is inspected,
            // so exact ties round away from zero rather than to even.
            if (man >> (13 - half_exp)) & 0x1u32 != 0 {
                half_man += 1;
            }
            // No exponent for subnormals
            return (half_sign | half_man) as u16;
        }
        // Rebias the exponent
        let half_exp = (half_exp as u32) << 10;
        let half_man = man >> 13;
        // Check for rounding (highest discarded bit only — see note above). A
        // mantissa carry from the +1 overflows into the exponent bits, which
        // bumps the value into the next binade as desired.
        if man & 0x00001000u32 != 0 {
            // Round it
            ((half_sign | half_exp | half_man) + 1) as u16
        } else {
            (half_sign | half_exp | half_man) as u16
        }
    }
    pub fn f64_to_f16(value: f64) -> u16 {
        // Convert to raw bytes, truncating the last 32-bits of mantissa; that precision will always
        // be lost on half-precision.
        // NOTE(review): the discarded low bits never reach the rounding check
        // below, so values just above a rounding boundary can round as if they
        // were exactly on it — TODO confirm this is acceptable.
        let val: u64 = unsafe { mem::transmute(value) };
        let x = (val >> 32) as u32;
        // Check for signed zero
        if x & 0x7FFFFFFFu32 == 0 {
            return (x >> 16) as u16;
        }
        // Extract IEEE754 components
        let sign = x & 0x80000000u32;
        let exp = x & 0x7FF00000u32;
        let man = x & 0x000FFFFFu32;
        // Subnormals will underflow, so return signed zero
        if exp == 0 {
            return (sign >> 16) as u16;
        }
        // Check for all exponent bits being set, which is Infinity or NaN
        if exp == 0x7FF00000u32 {
            // A mantissa of zero is a signed Infinity. We also have to check the last 32 bits.
            if (man == 0) && (val as u32 == 0) {
                return ((sign >> 16) | 0x7C00u32) as u16;
            }
            // Otherwise, this is NaN
            return ((sign >> 16) | 0x7E00u32) as u16;
        }
        // The number is normalized, start assembling half precision version
        let half_sign = sign >> 16;
        // Unbias the exponent, then bias for half precision
        let unbiased_exp = ((exp >> 20) as i64) - 1023;
        let half_exp = unbiased_exp + 15;
        // Check for exponent overflow, return +infinity
        if half_exp >= 0x1F {
            return (half_sign | 0x7C00u32) as u16;
        }
        // Check for underflow
        if half_exp <= 0 {
            // Check mantissa for what we can do
            if 10 - half_exp > 21 {
                // No rounding possibility, so this is a full underflow, return signed zero
                return half_sign as u16;
            }
            // Don't forget about hidden leading mantissa bit when assembling mantissa
            let man = man | 0x00100000u32;
            let mut half_man = man >> (11 - half_exp);
            // Check for rounding (highest discarded bit only)
            if (man >> (10 - half_exp)) & 0x1u32 != 0 {
                half_man += 1;
            }
            // No exponent for subnormals
            return (half_sign | half_man) as u16;
        }
        // Rebias the exponent
        let half_exp = (half_exp as u32) << 10;
        let half_man = man >> 10;
        // Check for rounding (highest discarded bit only; a carry overflows
        // cleanly into the exponent bits)
        if man & 0x00000200u32 != 0 {
            // Round it
            ((half_sign | half_exp | half_man) + 1) as u16
        } else {
            (half_sign | half_exp | half_man) as u16
        }
    }
    pub fn f16_to_f32(i: u16) -> f32 {
        // Check for signed zero
        if i & 0x7FFFu16 == 0 {
            return unsafe { mem::transmute((i as u32) << 16) };
        }
        let half_sign = (i & 0x8000u16) as u32;
        let half_exp = (i & 0x7C00u16) as u32;
        let half_man = (i & 0x03FFu16) as u32;
        // Check for an infinity or NaN when all exponent bits set
        if half_exp == 0x7C00u32 {
            // Check for signed infinity if mantissa is zero
            if half_man == 0 {
                return unsafe { mem::transmute((half_sign << 16) | 0x7F800000u32) };
            } else {
                // NaN, only 1st mantissa bit is set (sign and payload are discarded)
                return core::f32::NAN;
            }
        }
        // Calculate single-precision components with adjusted exponent
        let sign = half_sign << 16;
        // Unbias exponent
        let unbiased_exp = ((half_exp as i32) >> 10) - 15;
        // Check for subnormals, which will be normalized by adjusting exponent
        if half_exp == 0 {
            // Calculate how much to adjust the exponent by, i.e. the number of
            // leading zeros within the 10-bit mantissa field (u16 has 6 more)
            let e = (half_man as u16).leading_zeros() - 6;
            // Rebias and adjust exponent
            let exp = (127 - 15 - e) << 23;
            let man = (half_man << (14 + e)) & 0x7F_FF_FFu32;
            return unsafe { mem::transmute(sign | exp | man) };
        }
        // Rebias exponent for a normalized normal
        let exp = ((unbiased_exp + 127) as u32) << 23;
        let man = (half_man & 0x03FFu32) << 13;
        unsafe { mem::transmute(sign | exp | man) }
    }
    pub fn f16_to_f64(i: u16) -> f64 {
        // Check for signed zero
        if i & 0x7FFFu16 == 0 {
            return unsafe { mem::transmute((i as u64) << 48) };
        }
        let half_sign = (i & 0x8000u16) as u64;
        let half_exp = (i & 0x7C00u16) as u64;
        let half_man = (i & 0x03FFu16) as u64;
        // Check for an infinity or NaN when all exponent bits set
        if half_exp == 0x7C00u64 {
            // Check for signed infinity if mantissa is zero
            if half_man == 0 {
                return unsafe { mem::transmute((half_sign << 48) | 0x7FF0000000000000u64) };
            } else {
                // NaN, only 1st mantissa bit is set (sign and payload are discarded)
                return core::f64::NAN;
            }
        }
        // Calculate double-precision components with adjusted exponent
        let sign = half_sign << 48;
        // Unbias exponent
        let unbiased_exp = ((half_exp as i64) >> 10) - 15;
        // Check for subnormals, which will be normalized by adjusting exponent
        if half_exp == 0 {
            // Calculate how much to adjust the exponent by, i.e. the number of
            // leading zeros within the 10-bit mantissa field (u16 has 6 more)
            let e = (half_man as u16).leading_zeros() - 6;
            // Rebias and adjust exponent
            let exp = ((1023 - 15 - e) as u64) << 52;
            let man = (half_man << (43 + e)) & 0xF_FFFF_FFFF_FFFFu64;
            return unsafe { mem::transmute(sign | exp | man) };
        }
        // Rebias exponent for a normalized normal
        let exp = ((unbiased_exp + 1023) as u64) << 52;
        let man = (half_man & 0x03FFu64) << 42;
        unsafe { mem::transmute(sign | exp | man) }
    }
}
/// Contains utility functions to convert between slices of `u16` bits and `f16` numbers.
pub mod slice {
    use super::f16;
    use core::slice;

    /// Reinterpret a mutable slice of `u16` bits as a mutable slice of `f16` numbers.
    // The transmuted slice has the same life time as the original,
    // Which prevents mutating the borrowed `mut [u16]` argument
    // As long as the returned `mut [f16]` is borrowed.
    #[inline]
    pub fn from_bits_mut(bits: &mut [u16]) -> &mut [f16] {
        // Derive the mutable pointer from the mutable borrow via
        // `as_mut_ptr()`; the previous `as_ptr() as *mut _` cast produced a
        // `*mut` from a shared-access pointer.
        let pointer = bits.as_mut_ptr() as *mut f16;
        let length = bits.len();
        // SAFETY: `f16` is a newtype containing a single `u16`, so the
        // pointer/length pair describes the same allocation with matching
        // element size; the output lifetime is tied to `bits`.
        unsafe { slice::from_raw_parts_mut(pointer, length) }
    }

    /// Reinterpret a mutable slice of `f16` numbers as a mutable slice of `u16` bits.
    // The transmuted slice has the same life time as the original,
    // Which prevents mutating the borrowed `mut [f16]` argument
    // As long as the returned `mut [u16]` is borrowed.
    #[inline]
    pub fn to_bits_mut(bits: &mut [f16]) -> &mut [u16] {
        // Same fix as `from_bits_mut`: take the pointer from the mutable borrow.
        let pointer = bits.as_mut_ptr() as *mut u16;
        let length = bits.len();
        // SAFETY: identical layout reasoning as `from_bits_mut`.
        unsafe { slice::from_raw_parts_mut(pointer, length) }
    }

    /// Reinterpret a slice of `u16` bits as a slice of `f16` numbers.
    // The transmuted slice has the same life time as the original
    #[inline]
    pub fn from_bits(bits: &[u16]) -> &[f16] {
        let pointer = bits.as_ptr() as *const f16;
        let length = bits.len();
        // SAFETY: `f16` wraps a single `u16`, so size and count match.
        unsafe { slice::from_raw_parts(pointer, length) }
    }

    /// Reinterpret a slice of `f16` numbers as a slice of `u16` bits.
    // The transmuted slice has the same life time as the original
    #[inline]
    pub fn to_bits(bits: &[f16]) -> &[u16] {
        let pointer = bits.as_ptr() as *const u16;
        let length = bits.len();
        // SAFETY: `f16` wraps a single `u16`, so size and count match.
        unsafe { slice::from_raw_parts(pointer, length) }
    }
}
/// Contains utility functions to convert between vectors of `u16` bits and `f16` vectors.
///
/// This module is only available with the `std` feature.
#[cfg(feature = "std")]
pub mod vec {
    use super::f16;
    use core::mem;

    /// Converts a vector of `u16` elements into a vector of `f16` elements.
    /// This function merely reinterprets the contents of the vector,
    /// so it's a zero-copy operation.
    #[inline]
    pub fn from_bits(mut bits: Vec<u16>) -> Vec<f16> {
        // `f16` is a newtype over `u16`, so the new vector can reuse the
        // original allocation, length, and capacity unchanged.
        let (length, capacity) = (bits.len(), bits.capacity());
        let pointer = bits.as_mut_ptr() as *mut f16;
        // Give up ownership without running the `Vec<u16>` destructor so the
        // allocation stays alive for the vector built below.
        mem::forget(bits);
        // Rebuild the same allocation as a `Vec<f16>`.
        unsafe { Vec::from_raw_parts(pointer, length, capacity) }
    }

    /// Converts a vector of `f16` elements into a vector of `u16` elements.
    /// This function merely reinterprets the contents of the vector,
    /// so it's a zero-copy operation.
    #[inline]
    pub fn to_bits(mut numbers: Vec<f16>) -> Vec<u16> {
        // Mirror of `from_bits`: same allocation, reinterpreted element type.
        let (length, capacity) = (numbers.len(), numbers.capacity());
        let pointer = numbers.as_mut_ptr() as *mut u16;
        // Give up ownership without running the `Vec<f16>` destructor.
        mem::forget(numbers);
        // Rebuild the same allocation as a `Vec<u16>`.
        unsafe { Vec::from_raw_parts(pointer, length, capacity) }
    }
}
#[cfg(test)]
mod test {
use super::*;
use core;
use core::cmp::Ordering;
// Verifies that every bit-pattern constant in `consts` matches the value
// obtained by converting the corresponding `core::f32` constant via `from_f32`.
#[test]
fn test_f16_consts_from_f32() {
    let one = f16::from_f32(1.0);
    let zero = f16::from_f32(0.0);
    let neg_zero = f16::from_f32(-0.0);
    let inf = f16::from_f32(core::f32::INFINITY);
    let neg_inf = f16::from_f32(core::f32::NEG_INFINITY);
    let nan = f16::from_f32(core::f32::NAN);
    assert_eq!(consts::ONE, one);
    assert_eq!(consts::ZERO, zero);
    assert_eq!(consts::NEG_ZERO, neg_zero);
    assert_eq!(consts::INFINITY, inf);
    assert_eq!(consts::NEG_INFINITY, neg_inf);
    // NaN != NaN under PartialEq, so check via is_nan instead of assert_eq.
    assert!(nan.is_nan());
    assert!(consts::NAN.is_nan());
    let e = f16::from_f32(core::f32::consts::E);
    let pi = f16::from_f32(core::f32::consts::PI);
    let frac_1_pi = f16::from_f32(core::f32::consts::FRAC_1_PI);
    let frac_1_sqrt_2 = f16::from_f32(core::f32::consts::FRAC_1_SQRT_2);
    let frac_2_pi = f16::from_f32(core::f32::consts::FRAC_2_PI);
    let frac_2_sqrt_pi = f16::from_f32(core::f32::consts::FRAC_2_SQRT_PI);
    let frac_pi_2 = f16::from_f32(core::f32::consts::FRAC_PI_2);
    let frac_pi_3 = f16::from_f32(core::f32::consts::FRAC_PI_3);
    let frac_pi_4 = f16::from_f32(core::f32::consts::FRAC_PI_4);
    let frac_pi_6 = f16::from_f32(core::f32::consts::FRAC_PI_6);
    let frac_pi_8 = f16::from_f32(core::f32::consts::FRAC_PI_8);
    let ln_10 = f16::from_f32(core::f32::consts::LN_10);
    let ln_2 = f16::from_f32(core::f32::consts::LN_2);
    let log10_e = f16::from_f32(core::f32::consts::LOG10_E);
    let log2_e = f16::from_f32(core::f32::consts::LOG2_E);
    let sqrt_2 = f16::from_f32(core::f32::consts::SQRT_2);
    assert_eq!(consts::E, e);
    assert_eq!(consts::PI, pi);
    assert_eq!(consts::FRAC_1_PI, frac_1_pi);
    assert_eq!(consts::FRAC_1_SQRT_2, frac_1_sqrt_2);
    assert_eq!(consts::FRAC_2_PI, frac_2_pi);
    assert_eq!(consts::FRAC_2_SQRT_PI, frac_2_sqrt_pi);
    assert_eq!(consts::FRAC_PI_2, frac_pi_2);
    assert_eq!(consts::FRAC_PI_3, frac_pi_3);
    assert_eq!(consts::FRAC_PI_4, frac_pi_4);
    assert_eq!(consts::FRAC_PI_6, frac_pi_6);
    assert_eq!(consts::FRAC_PI_8, frac_pi_8);
    assert_eq!(consts::LN_10, ln_10);
    assert_eq!(consts::LN_2, ln_2);
    assert_eq!(consts::LOG10_E, log10_e);
    assert_eq!(consts::LOG2_E, log2_e);
    assert_eq!(consts::SQRT_2, sqrt_2);
}
#[test]
fn test_f16_consts_from_f64() {
let one = f16::from_f64(1.0);
let zero = f16::from_f64(0.0);
let neg_zero = f16::from_f64(-0.0);
let inf = f16::from_f64(core::f64::INFINITY);
let neg_inf = f16::from_f64(core::f64::NEG_INFINITY);
let nan = f16::from_f64(core::f64::NAN);
assert_eq!(consts::ONE, one);
assert_eq!(consts::ZERO, zero);
assert_eq!(consts::NEG_ZERO, neg_zero);
assert_eq!(consts::INFINITY, inf);
assert_eq!(consts::NEG_INFINITY, neg_inf);
assert!(nan.is_nan());
assert!(consts::NAN.is_nan());
let e = f16::from_f64(core::f64::consts::E);
let pi = f16::from_f64(core::f64::consts::PI);
let frac_1_pi = f16::from_f64(core::f64::consts::FRAC_1_PI);
let frac_1_sqrt_2 = f16::from_f64(core::f64::consts::FRAC_1_SQRT_2);
let frac_2_pi = f16::from_f64(core::f64::consts::FRAC_2_PI);
let frac_2_sqrt_pi = f16::from_f64(core::f64::consts::FRAC_2_SQRT_PI);
let frac_pi_2 = f16::from_f64(core::f64::consts::FRAC_PI_2);
let frac_pi_3 = f16::from_f64(core::f64::consts::FRAC_PI_3);
let frac_pi_4 = f16::from_f64(core::f64::consts::FRAC_PI_4);
let frac_pi_6 = f16::from_f64(core::f64::consts::FRAC_PI_6);
let frac_pi_8 = f16::from_f64(core::f64::consts::FRAC_PI_8);
let ln_10 = f16::from_f64(core::f64::consts::LN_10);
let ln_2 = f16::from_f64(core::f64::consts::LN_2);
let log10_e = f16::from_f64(core::f64::consts::LOG10_E);
let log2_e = f16::from_f64(core::f64::consts::LOG2_E);
let sqrt_2 = f16::from_f64(core::f64::consts::SQRT_2);
assert_eq!(consts::E, e);
assert_eq!(consts::PI, pi);
assert_eq!(consts::FRAC_1_PI, frac_1_pi);
assert_eq!(consts::FRAC_1_SQRT_2, frac_1_sqrt_2);
assert_eq!(consts::FRAC_2_PI, frac_2_pi);
assert_eq!(consts::FRAC_2_SQRT_PI, frac_2_sqrt_pi);
assert_eq!(consts::FRAC_PI_2, frac_pi_2);
assert_eq!(consts::FRAC_PI_3, frac_pi_3);
assert_eq!(consts::FRAC_PI_4, frac_pi_4);
assert_eq!(consts::FRAC_PI_6, frac_pi_6);
assert_eq!(consts::FRAC_PI_8, frac_pi_8);
assert_eq!(consts::LN_10, ln_10);
assert_eq!(consts::LN_2, ln_2);
assert_eq!(consts::LOG10_E, log10_e);
assert_eq!(consts::LOG2_E, log2_e);
assert_eq!(consts::SQRT_2, sqrt_2);
}
#[test]
fn test_nan_conversion() {
use core::mem;
let nan64: f64;
let neg_nan64: f64;
let nan32: f32;
let neg_nan32: f32;
unsafe {
nan64 = mem::transmute(0x7ff0_0000_0000_0001u64);
neg_nan64 = mem::transmute(0xfff0_0000_0000_0001u64);
nan32 = mem::transmute(0x7f80_0001u32);
neg_nan32 = mem::transmute(0xff80_0001u32);
}
let nan32_from_64 = nan64 as f32;
let neg_nan32_from_64 = neg_nan64 as f32;
let nan16_from_64 = f16::from_f64(nan64);
let neg_nan16_from_64 = f16::from_f64(neg_nan64);
let nan16_from_32 = f16::from_f32(nan32);
let neg_nan16_from_32 = f16::from_f32(neg_nan32);
assert!(nan64.is_nan());
assert!(neg_nan64.is_nan());
assert!(nan32.is_nan());
assert!(neg_nan32.is_nan());
assert!(nan32_from_64.is_nan());
assert!(neg_nan32_from_64.is_nan());
assert!(nan16_from_64.is_nan());
assert!(neg_nan16_from_64.is_nan());
assert!(nan16_from_32.is_nan());
assert!(neg_nan16_from_32.is_nan());
let sign64 = 1u64 << 63;
let sign32 = 1u32 << 31;
let sign16 = 1u16 << 15;
let nan64_u: u64;
let neg_nan64_u: u64;
let nan32_u: u32;
let neg_nan32_u: u32;
let nan32_from_64_u: u32;
let neg_nan32_from_64_u: u32;
let nan16_from_64_u: u16;
let neg_nan16_from_64_u: u16;
let nan16_from_32_u: u16;
let neg_nan16_from_32_u: u16;
unsafe {
nan64_u = mem::transmute(nan64);
neg_nan64_u = mem::transmute(neg_nan64);
nan32_u = mem::transmute(nan32);
neg_nan32_u = mem::transmute(neg_nan32);
nan32_from_64_u = mem::transmute(nan32_from_64);
neg_nan32_from_64_u = mem::transmute(neg_nan32_from_64);
nan16_from_64_u = mem::transmute(nan16_from_64);
neg_nan16_from_64_u = mem::transmute(neg_nan16_from_64);
nan16_from_32_u = mem::transmute(nan16_from_32);
neg_nan16_from_32_u = mem::transmute(neg_nan16_from_32);
}
assert_eq!(nan64_u & sign64, 0);
assert_eq!(neg_nan64_u & sign64, sign64);
assert_eq!(nan32_u & sign32, 0);
assert_eq!(neg_nan32_u & sign32, sign32);
assert_eq!(nan32_from_64_u & sign32, 0);
assert_eq!(neg_nan32_from_64_u & sign32, sign32);
assert_eq!(nan16_from_64_u & sign16, 0);
assert_eq!(neg_nan16_from_64_u & sign16, sign16);
assert_eq!(nan16_from_32_u & sign16, 0);
assert_eq!(neg_nan16_from_32_u & sign16, sign16);
}
#[test]
fn test_f16_to_f32() {
let f = f16::from_f32(7.0);
assert_eq!(f.to_f32(), 7.0f32);
// 7.1 is NOT exactly representable in 16-bit, it's rounded
let f = f16::from_f32(7.1);
let diff = (f.to_f32() - 7.1f32).abs();
assert!(diff <= consts::EPSILON.to_f32());
assert_eq!(f16::from_bits(0x0000_0001).to_f32(), 2.0f32.powi(-24));
assert_eq!(f16::from_bits(0x0000_0005).to_f32(), 5.0 * 2.0f32.powi(-24));
assert_eq!(f16::from_bits(0x0000_0001), f16::from_f32(2.0f32.powi(-24)));
assert_eq!(
f16::from_bits(0x0000_0005),
f16::from_f32(5.0 * 2.0f32.powi(-24))
);
}
#[test]
fn test_f16_to_f64() {
let f = f16::from_f64(7.0);
assert_eq!(f.to_f64(), 7.0f64);
// 7.1 is NOT exactly representable in 16-bit, it's rounded
let f = f16::from_f64(7.1);
let diff = (f.to_f64() - 7.1f64).abs();
assert!(diff <= consts::EPSILON.to_f64());
assert_eq!(f16::from_bits(0x0000_0001).to_f64(), 2.0f64.powi(-24));
assert_eq!(f16::from_bits(0x0000_0005).to_f64(), 5.0 * 2.0f64.powi(-24));
assert_eq!(f16::from_bits(0x0000_0001), f16::from_f64(2.0f64.powi(-24)));
assert_eq!(
f16::from_bits(0x0000_0005),
f16::from_f64(5.0 * 2.0f64.powi(-24))
);
}
#[test]
fn test_comparisons() {
let zero = f16::from_f64(0.0);
let one = f16::from_f64(1.0);
let neg_zero = f16::from_f64(-0.0);
let neg_one = f16::from_f64(-1.0);
assert_eq!(zero.partial_cmp(&neg_zero), Some(Ordering::Equal));
assert_eq!(neg_zero.partial_cmp(&zero), Some(Ordering::Equal));
assert!(zero == neg_zero);
assert!(neg_zero == zero);
assert!(!(zero != neg_zero));
assert!(!(neg_zero != zero));
assert!(!(zero < neg_zero));
assert!(!(neg_zero < zero));
assert!(zero <= neg_zero);
assert!(neg_zero <= zero);
assert!(!(zero > neg_zero));
assert!(!(neg_zero > zero));
assert!(zero >= neg_zero);
assert!(neg_zero >= zero);
assert_eq!(one.partial_cmp(&neg_zero), Some(Ordering::Greater));
assert_eq!(neg_zero.partial_cmp(&one), Some(Ordering::Less));
assert!(!(one == neg_zero));
assert!(!(neg_zero == one));
assert!(one != neg_zero);
assert!(neg_zero != one);
assert!(!(one < neg_zero));
assert!(neg_zero < one);
assert!(!(one <= neg_zero));
assert!(neg_zero <= one);
assert!(one > neg_zero);
assert!(!(neg_zero > one));
assert!(one >= neg_zero);
assert!(!(neg_zero >= one));
assert_eq!(one.partial_cmp(&neg_one), Some(Ordering::Greater));
assert_eq!(neg_one.partial_cmp(&one), Some(Ordering::Less));
assert!(!(one == neg_one));
assert!(!(neg_one == one));
assert!(one != neg_one);
assert!(neg_one != one);
assert!(!(one < neg_one));
assert!(neg_one < one);
assert!(!(one <= neg_one));
assert!(neg_one <= one);
assert!(one > neg_one);
assert!(!(neg_one > one));
assert!(one >= neg_one);
assert!(!(neg_one >= one));
}
#[test]
fn test_slice_conversions() {
use consts::*;
let bits = &[
E.to_bits(),
PI.to_bits(),
EPSILON.to_bits(),
FRAC_1_SQRT_2.to_bits(),
];
let numbers = &[E, PI, EPSILON, FRAC_1_SQRT_2];
// Convert from bits to numbers
let from_bits = slice::from_bits(bits);
assert_slice_contents_eq(from_bits, numbers);
// Convert from numbers back to bits
let to_bits = slice::to_bits(from_bits);
assert_slice_contents_eq(to_bits, bits);
}
#[test]
#[cfg(feature = "std")]
fn test_vec_conversions() {
use consts::*;
let numbers = vec![E, PI, EPSILON, FRAC_1_SQRT_2];
let bits = vec![
E.to_bits(),
PI.to_bits(),
EPSILON.to_bits(),
FRAC_1_SQRT_2.to_bits(),
];
let bits_cloned = bits.clone();
// Convert from bits to numbers
let from_bits = vec::from_bits(bits);
assert_slice_contents_eq(&from_bits, &numbers);
// Convert from numbers back to bits
let to_bits = vec::to_bits(from_bits);
assert_slice_contents_eq(&to_bits, &bits_cloned);
}
fn assert_slice_contents_eq<T: PartialEq + core::fmt::Debug>(a: &[T], b: &[T]) {
// Checks only pointer and len,
// but we know these are the same
// because we just transmuted them, so
assert_eq!(a, b);
// We need to perform manual content equality checks
for (a, b) in a.iter().zip(b.iter()) {
assert_eq!(a, b);
}
}
#[test]
fn test_mutablility(){
use consts::*;
let mut bits_array = [ PI.to_bits() ];
let bits = &mut bits_array[..];
{ // would not compile without these braces
// TODO: add automated test to check that it does not compile without braces
let numbers = slice::from_bits_mut(bits);
numbers[0] = E;
}
assert_eq!(bits, &[ E.to_bits() ]);
bits[0] = LN_2.to_bits();
assert_eq!(bits, &[ LN_2.to_bits() ]);
}
}
// NOTE: fixed the tests that compared the rounding error of 7.1 against EPSILON —
// EPSILON is the ULP at 1.0, while the ULP at 7.1 is 4 * EPSILON.
//! A crate that provides support for the half-precision floating point type.
//!
//! This crate provides the `f16` type, which is an implementation of the IEEE 754-2008 `binary16`
//! floating point type. This 'half' precision floating point type is intended for efficient storage
//! where the full range and precision of a larger floating point value is not required. This is
//! especially useful for image storage formats.
//!
//! Because `f16` is primarily for efficient storage, floating point operations are not implemented.
//! Operations should be performed with `f32` or higher-precision types and converted to/from `f16`
//! as necessary.
//!
//! Some hardware architectures provide support for 16-bit floating point conversions. Enable the
//! `use-intrinsics` feature to use LLVM intrinsics for hardware conversions. This crate does no
//! checks on whether the hardware supports the feature. This feature currently only works on
//! nightly Rust due to a compiler feature gate.
//!
//! Support for `serde` crate `Serialize` and `Deserialize` traits is provided when the `serde`
//! feature is enabled. This adds a dependency on `serde` crate so is an optional feature that works
//! on Rust 1.15 or newer.
//!
//! The crate uses `#[no_std]` by default, so can be used in embedded environments without using the
//! Rust `std` library. A `std` feature is available, which enables additional utilities using the
//! `std` library, such as the `vec` module that provides zero-copy `Vec` conversions.
#![warn(
missing_docs, missing_copy_implementations, missing_debug_implementations, trivial_casts,
trivial_numeric_casts, unused_extern_crates, unused_import_braces
)]
#![cfg_attr(not(feature = "std"), no_std)]
#![cfg_attr(feature = "use-intrinsics", feature(link_llvm_intrinsics))]
#[cfg(feature = "serde")]
#[macro_use]
extern crate serde;
#[cfg(feature = "std")]
extern crate core;
use core::cmp::Ordering;
use core::fmt::{Debug, Display, Error, Formatter, LowerExp, UpperExp};
use core::num::{FpCategory, ParseFloatError};
use core::str::FromStr;
/// The 16-bit floating point type.
// Stores the raw IEEE 754-2008 binary16 bit pattern in a single `u16`:
// 1 sign bit, 5 exponent bits (bias 15), 10 mantissa bits.
// All arithmetic is performed by converting to/from f32 or f64 elsewhere in
// this file; this type itself only holds the bits.
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Default)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct f16(u16);
pub mod consts {
    //! Useful `f16` constants.
    //!
    //! Each constant is spelled as its exact binary16 bit pattern
    //! (1 sign bit, 5 exponent bits with bias 15, 10 mantissa bits), so no
    //! runtime conversion is needed.
    use super::f16;
    /// 16-bit equivalent of `std::f32::DIGITS`
    pub const DIGITS: u32 = 3;
    /// 16-bit floating point epsilon: the difference between 1.0 and the next
    /// larger representable number (the ULP at 1.0). `9.7656e-4`
    pub const EPSILON: f16 = f16(0x1400u16);
    /// 16-bit positive infinity.
    pub const INFINITY: f16 = f16(0x7C00u16);
    /// 16-bit equivalent of `std::f32::MANTISSA_DIGITS`
    pub const MANTISSA_DIGITS: u32 = 11;
    /// Largest finite `f16` value. `65504`
    pub const MAX: f16 = f16(0x7BFF);
    /// 16-bit equivalent of `std::f32::MAX_10_EXP`
    pub const MAX_10_EXP: i32 = 5;
    /// 16-bit equivalent of `std::f32::MAX_EXP`
    pub const MAX_EXP: i32 = 16;
    /// Smallest finite `f16` value. `-65504`
    pub const MIN: f16 = f16(0xFBFF);
    /// 16-bit equivalent of `std::f32::MIN_10_EXP`
    pub const MIN_10_EXP: i32 = -4;
    /// 16-bit equivalent of `std::f32::MIN_EXP`
    pub const MIN_EXP: i32 = -13;
    /// Smallest positive, normalized `f16` value. Approx. `6.10352e−5`
    pub const MIN_POSITIVE: f16 = f16(0x0400u16);
    /// 16-bit NaN (quiet NaN: exponent all ones, top mantissa bit set).
    pub const NAN: f16 = f16(0x7E00u16);
    /// 16-bit negative infinity.
    pub const NEG_INFINITY: f16 = f16(0xFC00u16);
    /// 16-bit equivalent of `std::f32::RADIX`
    pub const RADIX: u32 = 2;
    /// 16-bit minimum positive subnormal value. Approx. `5.96046e−8`
    pub const MIN_POSITIVE_SUBNORMAL: f16 = f16(0x0001u16);
    /// 16-bit maximum subnormal value. Approx. `6.09756e−5`
    pub const MAX_SUBNORMAL: f16 = f16(0x03FFu16);
    /// 16-bit floating point `1.0`
    pub const ONE: f16 = f16(0x3C00u16);
    /// 16-bit floating point `0.0`
    pub const ZERO: f16 = f16(0x0000u16);
    /// 16-bit floating point `-0.0` (only the sign bit set).
    pub const NEG_ZERO: f16 = f16(0x8000u16);
    /// Euler's number.
    pub const E: f16 = f16(0x4170u16);
    /// Archimedes' constant.
    pub const PI: f16 = f16(0x4248u16);
    /// 1.0/pi
    pub const FRAC_1_PI: f16 = f16(0x3518u16);
    /// 1.0/sqrt(2.0)
    pub const FRAC_1_SQRT_2: f16 = f16(0x39A8u16);
    /// 2.0/pi
    pub const FRAC_2_PI: f16 = f16(0x3918u16);
    /// 2.0/sqrt(pi)
    pub const FRAC_2_SQRT_PI: f16 = f16(0x3C83u16);
    /// pi/2.0
    pub const FRAC_PI_2: f16 = f16(0x3E48u16);
    /// pi/3.0
    pub const FRAC_PI_3: f16 = f16(0x3C30u16);
    /// pi/4.0
    pub const FRAC_PI_4: f16 = f16(0x3A48u16);
    /// pi/6.0
    pub const FRAC_PI_6: f16 = f16(0x3830u16);
    /// pi/8.0
    pub const FRAC_PI_8: f16 = f16(0x3648u16);
    /// ln(10.0)
    pub const LN_10: f16 = f16(0x409Bu16);
    /// ln(2.0)
    pub const LN_2: f16 = f16(0x398Cu16);
    /// log10(e)
    pub const LOG10_E: f16 = f16(0x36F3u16);
    /// log2(e)
    pub const LOG2_E: f16 = f16(0x3DC5u16);
    /// sqrt(2)
    pub const SQRT_2: f16 = f16(0x3DA8u16);
}
impl f16 {
    /// Constructs a 16-bit floating point value from the raw bits.
    #[inline]
    pub fn from_bits(bits: u16) -> f16 {
        f16(bits)
    }
    /// Constructs a 16-bit floating point value from a 32-bit floating point value.
    ///
    /// If the 32-bit value is too large to fit in 16-bits, +/- infinity will result. NaN values
    /// are preserved. 32-bit subnormal values are too tiny to be represented in 16-bits and result
    /// in +/- 0. Exponents that underflow the minimum 16-bit exponent will result in 16-bit
    /// subnormals or +/- 0. All other values are truncated and rounded to the nearest
    /// representable 16-bit value.
    #[inline]
    pub fn from_f32(value: f32) -> f16 {
        f16(convert::f32_to_f16(value))
    }
    /// Constructs a 16-bit floating point value from a 64-bit floating point value.
    ///
    /// If the 64-bit value is too large to fit in 16-bits, +/- infinity will result. NaN values
    /// are preserved. 64-bit subnormal values are too tiny to be represented in 16-bits and result
    /// in +/- 0. Exponents that underflow the minimum 16-bit exponent will result in 16-bit
    /// subnormals or +/- 0. All other values are truncated and rounded to the nearest
    /// representable 16-bit value.
    #[inline]
    pub fn from_f64(value: f64) -> f16 {
        f16(convert::f64_to_f16(value))
    }
    /// Converts an `f16` into the underlying bit representation.
    #[inline]
    pub fn to_bits(self) -> u16 {
        self.0
    }
    /// Converts an `f16` into the underlying bit representation.
    #[deprecated(since = "1.2.0", note = "renamed to to_bits")]
    #[inline]
    pub fn as_bits(self) -> u16 {
        self.to_bits()
    }
    /// Converts an `f16` value in a `f32` value.
    ///
    /// This conversion is lossless as all 16-bit floating point values can be represented exactly
    /// in 32-bit floating point.
    #[inline]
    pub fn to_f32(self) -> f32 {
        convert::f16_to_f32(self.0)
    }
    /// Converts an `f16` value in a `f64` value.
    ///
    /// This conversion is lossless as all 16-bit floating point values can be represented exactly
    /// in 64-bit floating point.
    #[inline]
    pub fn to_f64(self) -> f64 {
        convert::f16_to_f64(self.0)
    }
    /// Returns `true` if this value is `NaN` and `false` otherwise.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use half::f16;
    ///
    /// let nan = half::consts::NAN;
    /// let f = f16::from_f32(7.0_f32);
    ///
    /// assert!(nan.is_nan());
    /// assert!(!f.is_nan());
    /// ```
    #[inline]
    pub fn is_nan(self) -> bool {
        // Mask off the sign bit; NaN is exponent all ones (0x7C00) with a
        // non-zero mantissa, i.e. a magnitude strictly above 0x7C00.
        self.0 & 0x7FFFu16 > 0x7C00u16
    }
    /// Returns `true` if this value is positive infinity or negative infinity and `false`
    /// otherwise.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use half::f16;
    ///
    /// let f = f16::from_f32(7.0f32);
    /// let inf = half::consts::INFINITY;
    /// let neg_inf = half::consts::NEG_INFINITY;
    /// let nan = half::consts::NAN;
    ///
    /// assert!(!f.is_infinite());
    /// assert!(!nan.is_infinite());
    ///
    /// assert!(inf.is_infinite());
    /// assert!(neg_inf.is_infinite());
    /// ```
    #[inline]
    pub fn is_infinite(self) -> bool {
        // Infinity is exponent all ones with a zero mantissa; masking off the
        // sign bit covers both signs at once.
        self.0 & 0x7FFFu16 == 0x7C00u16
    }
    /// Returns `true` if this number is neither infinite nor `NaN`.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use half::f16;
    ///
    /// let f = f16::from_f32(7.0f32);
    /// let inf = half::consts::INFINITY;
    /// let neg_inf = half::consts::NEG_INFINITY;
    /// let nan = half::consts::NAN;
    ///
    /// assert!(f.is_finite());
    ///
    /// assert!(!nan.is_finite());
    /// assert!(!inf.is_finite());
    /// assert!(!neg_inf.is_finite());
    /// ```
    #[inline]
    pub fn is_finite(self) -> bool {
        // Finite numbers are everything whose exponent field is not all ones
        // (all-ones exponent encodes both infinity and NaN).
        self.0 & 0x7C00u16 != 0x7C00u16
    }
    /// Returns `true` if the number is neither zero, infinite, subnormal, or `NaN`.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use half::f16;
    ///
    /// let min = half::consts::MIN_POSITIVE;
    /// let max = half::consts::MAX;
    /// let lower_than_min = f16::from_f32(1.0e-10_f32);
    /// let zero = f16::from_f32(0.0_f32);
    ///
    /// assert!(min.is_normal());
    /// assert!(max.is_normal());
    ///
    /// assert!(!zero.is_normal());
    /// assert!(!half::consts::NAN.is_normal());
    /// assert!(!half::consts::INFINITY.is_normal());
    /// // Values between `0` and `min` are Subnormal.
    /// assert!(!lower_than_min.is_normal());
    /// ```
    #[inline]
    pub fn is_normal(self) -> bool {
        // Normal numbers have an exponent field that is neither all ones
        // (inf/NaN) nor all zeros (zero/subnormal).
        let exp = self.0 & 0x7C00u16;
        exp != 0x7C00u16 && exp != 0
    }
    /// Returns the floating point category of the number.
    ///
    /// If only one property is going to be tested, it is generally faster to use the specific
    /// predicate instead.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::num::FpCategory;
    /// use half::f16;
    ///
    /// let num = f16::from_f32(12.4_f32);
    /// let inf = half::consts::INFINITY;
    ///
    /// assert_eq!(num.classify(), FpCategory::Normal);
    /// assert_eq!(inf.classify(), FpCategory::Infinite);
    /// ```
    pub fn classify(self) -> FpCategory {
        // Split off the exponent and mantissa fields; the sign bit is
        // irrelevant to classification.
        let exp = self.0 & 0x7C00u16;
        let man = self.0 & 0x03FFu16;
        if exp == 0 {
            // Zero exponent: either a signed zero or a subnormal.
            if man == 0 {
                FpCategory::Zero
            } else {
                FpCategory::Subnormal
            }
        } else if exp == 0x7C00u16 {
            // All-ones exponent: either infinity or NaN.
            if man == 0 {
                FpCategory::Infinite
            } else {
                FpCategory::Nan
            }
        } else {
            FpCategory::Normal
        }
    }
    /// Returns a number that represents the sign of `self`.
    ///
    /// * `1.0` if the number is positive, `+0.0` or `INFINITY`
    /// * `-1.0` if the number is negative, `-0.0` or `NEG_INFINITY`
    /// * `NAN` if the number is `NAN`
    ///
    /// # Examples
    ///
    /// ```rust
    /// use half::f16;
    ///
    /// let f = f16::from_f32(3.5_f32);
    ///
    /// assert_eq!(f.signum(), f16::from_f32(1.0));
    /// assert_eq!(half::consts::NEG_INFINITY.signum(), f16::from_f32(-1.0));
    ///
    /// assert!(half::consts::NAN.signum().is_nan());
    /// ```
    pub fn signum(self) -> f16 {
        if self.is_nan() {
            // NaN propagates unchanged (including its sign bit).
            self
        } else if self.0 & 0x8000u16 != 0 {
            // Sign bit set: negative (covers -0.0 and -inf too).
            f16::from_f32(-1.0)
        } else {
            f16::from_f32(1.0)
        }
    }
    /// Returns `true` if and only if `self` has a positive sign, including `+0.0`, `NaNs` with
    /// positive sign bit and positive infinity.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use half::f16;
    ///
    /// let nan = half::consts::NAN;
    /// let f = f16::from_f32(7.0_f32);
    /// let g = f16::from_f32(-7.0_f32);
    ///
    /// assert!(f.is_sign_positive());
    /// assert!(!g.is_sign_positive());
    /// // `NaN` can be either positive or negative
    /// assert!(nan.is_sign_positive() != nan.is_sign_negative());
    /// ```
    #[inline]
    pub fn is_sign_positive(self) -> bool {
        // Purely a test of the sign bit; no NaN special-casing.
        self.0 & 0x8000u16 == 0
    }
    /// Returns `true` if and only if `self` has a negative sign, including `-0.0`, `NaNs` with
    /// negative sign bit and negative infinity.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use half::f16;
    ///
    /// let nan = half::consts::NAN;
    /// let f = f16::from_f32(7.0f32);
    /// let g = f16::from_f32(-7.0f32);
    ///
    /// assert!(!f.is_sign_negative());
    /// assert!(g.is_sign_negative());
    /// // `NaN` can be either positive or negative
    /// assert!(nan.is_sign_positive() != nan.is_sign_negative());
    /// ```
    #[inline]
    pub fn is_sign_negative(self) -> bool {
        // Purely a test of the sign bit; no NaN special-casing.
        self.0 & 0x8000u16 != 0
    }
}
impl From<f16> for f32 {
fn from(x: f16) -> f32 {
x.to_f32()
}
}
impl From<f16> for f64 {
fn from(x: f16) -> f64 {
x.to_f64()
}
}
impl From<i8> for f16 {
    /// Converts via `f32`: every `i8` is exactly representable in `f32`, and
    /// narrowing from there yields the nearest `f16`.
    fn from(value: i8) -> f16 {
        f16::from_f32(value as f32)
    }
}
impl From<u8> for f16 {
    /// Converts via `f32`: every `u8` is exactly representable in `f32`, and
    /// narrowing from there yields the nearest `f16`.
    fn from(value: u8) -> f16 {
        f16::from_f32(value as f32)
    }
}
impl PartialEq for f16 {
    /// IEEE equality: NaN is unequal to everything (itself included), and
    /// `+0.0 == -0.0` even though their bit patterns differ in the sign bit.
    fn eq(&self, other: &f16) -> bool {
        if self.is_nan() || other.is_nan() {
            return false;
        }
        // Both operands are zeros exactly when no bit besides the sign bit is
        // set in either of them.
        let both_zero = (self.0 | other.0) & 0x7FFFu16 == 0;
        self.0 == other.0 || both_zero
    }
}
impl PartialOrd for f16 {
    // f16 is a sign-magnitude representation, so ordering is decided by the
    // sign bits first; within one sign the raw bit patterns order the same as
    // the values (reversed for negatives). Every method returns the unordered
    // result (None / false) when either operand is NaN, and treats +0.0 and
    // -0.0 as equal.
    fn partial_cmp(&self, other: &f16) -> Option<Ordering> {
        if self.is_nan() || other.is_nan() {
            None
        } else {
            let neg = self.0 & 0x8000u16 != 0;
            let other_neg = other.0 & 0x8000u16 != 0;
            match (neg, other_neg) {
                // Both non-negative: bit order equals value order.
                (false, false) => Some(self.0.cmp(&other.0)),
                (false, true) => {
                    // Opposite signs: equal only when both are zeros.
                    if (self.0 | other.0) & 0x7FFFu16 == 0 {
                        Some(Ordering::Equal)
                    } else {
                        Some(Ordering::Greater)
                    }
                }
                (true, false) => {
                    // Opposite signs: equal only when both are zeros.
                    if (self.0 | other.0) & 0x7FFFu16 == 0 {
                        Some(Ordering::Equal)
                    } else {
                        Some(Ordering::Less)
                    }
                }
                // Both negative: bit order is the reverse of value order.
                (true, true) => Some(other.0.cmp(&self.0)),
            }
        }
    }
    fn lt(&self, other: &f16) -> bool {
        if self.is_nan() || other.is_nan() {
            false
        } else {
            let neg = self.0 & 0x8000u16 != 0;
            let other_neg = other.0 & 0x8000u16 != 0;
            match (neg, other_neg) {
                (false, false) => self.0 < other.0,
                // non-negative < negative is never true.
                (false, true) => false,
                // negative < non-negative unless both are zeros.
                (true, false) => (self.0 | other.0) & 0x7FFFu16 != 0,
                // Both negative: larger bit pattern means smaller value.
                (true, true) => self.0 > other.0,
            }
        }
    }
    fn le(&self, other: &f16) -> bool {
        if self.is_nan() || other.is_nan() {
            false
        } else {
            let neg = self.0 & 0x8000u16 != 0;
            let other_neg = other.0 & 0x8000u16 != 0;
            match (neg, other_neg) {
                (false, false) => self.0 <= other.0,
                // non-negative <= negative only when both are zeros.
                (false, true) => (self.0 | other.0) & 0x7FFFu16 == 0,
                // negative <= non-negative always holds (zeros compare equal).
                (true, false) => true,
                // Both negative: larger bit pattern means smaller value.
                (true, true) => self.0 >= other.0,
            }
        }
    }
    fn gt(&self, other: &f16) -> bool {
        if self.is_nan() || other.is_nan() {
            false
        } else {
            let neg = self.0 & 0x8000u16 != 0;
            let other_neg = other.0 & 0x8000u16 != 0;
            match (neg, other_neg) {
                (false, false) => self.0 > other.0,
                // non-negative > negative unless both are zeros.
                (false, true) => (self.0 | other.0) & 0x7FFFu16 != 0,
                // negative > non-negative is never true.
                (true, false) => false,
                // Both negative: smaller bit pattern means larger value.
                (true, true) => self.0 < other.0,
            }
        }
    }
    fn ge(&self, other: &f16) -> bool {
        if self.is_nan() || other.is_nan() {
            false
        } else {
            let neg = self.0 & 0x8000u16 != 0;
            let other_neg = other.0 & 0x8000u16 != 0;
            match (neg, other_neg) {
                (false, false) => self.0 >= other.0,
                // non-negative >= negative always holds (zeros compare equal).
                (false, true) => true,
                // negative >= non-negative only when both are zeros.
                (true, false) => (self.0 | other.0) & 0x7FFFu16 == 0,
                // Both negative: smaller bit pattern means larger value.
                (true, true) => self.0 <= other.0,
            }
        }
    }
}
impl FromStr for f16 {
    type Err = ParseFloatError;

    /// Parses the string as an `f32`, then narrows the result to half
    /// precision; parse errors are those of `f32::from_str`.
    fn from_str(src: &str) -> Result<f16, ParseFloatError> {
        f32::from_str(src).map(f16::from_f32)
    }
}
impl Debug for f16 {
    // Prints the raw bit pattern in hex (e.g. `0x3C00`), not the numeric
    // value; use `Display` for a human-readable number.
    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
        write!(f, "0x{:X}", self.0)
    }
}
impl Display for f16 {
    // Formats via a lossless widening to `f32`.
    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
        write!(f, "{}", self.to_f32())
    }
}
impl LowerExp for f16 {
    // Scientific notation (`1.5e3`) via a lossless widening to `f32`.
    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
        write!(f, "{:e}", self.to_f32())
    }
}
impl UpperExp for f16 {
    // Scientific notation (`1.5E3`) via a lossless widening to `f32`.
    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
        write!(f, "{:E}", self.to_f32())
    }
}
#[cfg(feature = "use-intrinsics")]
mod convert {
    // Hardware-backed conversions via LLVM's half-precision intrinsics.
    // NOTE(review): per the crate docs, no runtime check is made that the
    // target actually supports these operations; requires nightly
    // (`link_llvm_intrinsics` feature gate).
    extern "C" {
        #[link_name = "llvm.convert.to.fp16.f32"]
        fn convert_to_fp16_f32(f: f32) -> u16;
        #[link_name = "llvm.convert.to.fp16.f64"]
        fn convert_to_fp16_f64(f: f64) -> u16;
        #[link_name = "llvm.convert.from.fp16.f32"]
        fn convert_from_fp16_f32(i: u16) -> f32;
        #[link_name = "llvm.convert.from.fp16.f64"]
        fn convert_from_fp16_f64(i: u16) -> f64;
    }
    #[inline(always)]
    pub fn f32_to_f16(f: f32) -> u16 {
        // SAFETY: pure value conversion; the intrinsic takes no pointers and
        // has no preconditions beyond target support.
        unsafe { convert_to_fp16_f32(f) }
    }
    #[inline(always)]
    pub fn f64_to_f16(f: f64) -> u16 {
        // SAFETY: pure value conversion, as above.
        unsafe { convert_to_fp16_f64(f) }
    }
    #[inline(always)]
    pub fn f16_to_f32(i: u16) -> f32 {
        // SAFETY: pure value conversion, as above.
        unsafe { convert_from_fp16_f32(i) }
    }
    #[inline(always)]
    pub fn f16_to_f64(i: u16) -> f64 {
        // SAFETY: pure value conversion, as above.
        unsafe { convert_from_fp16_f64(i) }
    }
}
#[cfg(not(feature = "use-intrinsics"))]
mod convert {
    // Pure-software IEEE 754 binary16 conversions, used when the
    // `use-intrinsics` feature is disabled.
    use core;
    use core::mem;
    pub fn f32_to_f16(value: f32) -> u16 {
        // Convert to raw bytes. SAFETY: f32 and u32 have the same size;
        // reinterpreting the bits of a float is always valid.
        let x: u32 = unsafe { mem::transmute(value) };
        // Check for signed zero
        if x & 0x7FFFFFFFu32 == 0 {
            // Shift keeps only the sign bit in the f16 position.
            return (x >> 16) as u16;
        }
        // Extract IEEE754 components
        let sign = x & 0x80000000u32;
        let exp = x & 0x7F800000u32;
        let man = x & 0x007FFFFFu32;
        // Subnormals will underflow, so return signed zero
        if exp == 0 {
            return (sign >> 16) as u16;
        }
        // Check for all exponent bits being set, which is Infinity or NaN
        if exp == 0x7F800000u32 {
            // A mantissa of zero is a signed Infinity
            if man == 0 {
                return ((sign >> 16) | 0x7C00u32) as u16;
            }
            // Otherwise, this is NaN (canonicalized to a quiet NaN; the
            // original payload bits are discarded).
            return ((sign >> 16) | 0x7E00u32) as u16;
        }
        // The number is normalized, start assembling half precision version
        let half_sign = sign >> 16;
        // Unbias the exponent (f32 bias 127), then bias for half precision
        // (bias 15).
        let unbiased_exp = ((exp >> 23) as i32) - 127;
        let half_exp = unbiased_exp + 15;
        // Check for exponent overflow, return +infinity
        if half_exp >= 0x1F {
            return (half_sign | 0x7C00u32) as u16;
        }
        // Check for underflow
        if half_exp <= 0 {
            // Check mantissa for what we can do
            if 14 - half_exp > 24 {
                // No rounding possibility, so this is a full underflow, return signed zero
                return half_sign as u16;
            }
            // Don't forget about hidden leading mantissa bit when assembling mantissa
            let man = man | 0x00800000u32;
            let mut half_man = man >> (14 - half_exp);
            // Check for rounding: bumps up whenever the highest discarded bit
            // is set (rounds halfway cases away from zero, not to even —
            // NOTE(review): presumably intentional for this crate; confirm).
            if (man >> (13 - half_exp)) & 0x1u32 != 0 {
                half_man += 1;
            }
            // No exponent for subnormals
            return (half_sign | half_man) as u16;
        }
        // Rebias the exponent
        let half_exp = (half_exp as u32) << 10;
        let half_man = man >> 13;
        // Check for rounding (same away-from-zero scheme as above). The +1 on
        // the assembled value lets a mantissa carry ripple into the exponent
        // field, which is exactly the correct result.
        if man & 0x00001000u32 != 0 {
            // Round it
            ((half_sign | half_exp | half_man) + 1) as u16
        } else {
            (half_sign | half_exp | half_man) as u16
        }
    }
    pub fn f64_to_f16(value: f64) -> u16 {
        // Convert to raw bytes, truncating the last 32-bits of mantissa; that precision will
        // always be lost on half-precision.
        // SAFETY: f64 and u64 have the same size; reinterpreting the bits of
        // a float is always valid.
        let val: u64 = unsafe { mem::transmute(value) };
        let x = (val >> 32) as u32;
        // Check for signed zero
        if x & 0x7FFFFFFFu32 == 0 {
            return (x >> 16) as u16;
        }
        // Extract IEEE754 components (from the top 32 bits: sign, 11-bit
        // exponent, top 20 mantissa bits).
        let sign = x & 0x80000000u32;
        let exp = x & 0x7FF00000u32;
        let man = x & 0x000FFFFFu32;
        // Subnormals will underflow, so return signed zero
        if exp == 0 {
            return (sign >> 16) as u16;
        }
        // Check for all exponent bits being set, which is Infinity or NaN
        if exp == 0x7FF00000u32 {
            // A mantissa of zero is a signed Infinity. We also have to check the last 32 bits.
            if (man == 0) && (val as u32 == 0) {
                return ((sign >> 16) | 0x7C00u32) as u16;
            }
            // Otherwise, this is NaN (canonicalized to a quiet NaN).
            return ((sign >> 16) | 0x7E00u32) as u16;
        }
        // The number is normalized, start assembling half precision version
        let half_sign = sign >> 16;
        // Unbias the exponent (f64 bias 1023), then bias for half precision
        // (bias 15).
        let unbiased_exp = ((exp >> 20) as i64) - 1023;
        let half_exp = unbiased_exp + 15;
        // Check for exponent overflow, return +infinity
        if half_exp >= 0x1F {
            return (half_sign | 0x7C00u32) as u16;
        }
        // Check for underflow
        if half_exp <= 0 {
            // Check mantissa for what we can do
            if 10 - half_exp > 21 {
                // No rounding possibility, so this is a full underflow, return signed zero
                return half_sign as u16;
            }
            // Don't forget about hidden leading mantissa bit when assembling mantissa
            let man = man | 0x00100000u32;
            let mut half_man = man >> (11 - half_exp);
            // Check for rounding (away-from-zero on the highest discarded
            // bit, as in f32_to_f16).
            if (man >> (10 - half_exp)) & 0x1u32 != 0 {
                half_man += 1;
            }
            // No exponent for subnormals
            return (half_sign | half_man) as u16;
        }
        // Rebias the exponent
        let half_exp = (half_exp as u32) << 10;
        let half_man = man >> 10;
        // Check for rounding; the +1 may carry from mantissa into exponent,
        // which is the correct rounded result.
        if man & 0x00000200u32 != 0 {
            // Round it
            ((half_sign | half_exp | half_man) + 1) as u16
        } else {
            (half_sign | half_exp | half_man) as u16
        }
    }
    pub fn f16_to_f32(i: u16) -> f32 {
        // Check for signed zero
        if i & 0x7FFFu16 == 0 {
            // SAFETY: u32 -> f32 bit reinterpretation is always valid.
            return unsafe { mem::transmute((i as u32) << 16) };
        }
        let half_sign = (i & 0x8000u16) as u32;
        let half_exp = (i & 0x7C00u16) as u32;
        let half_man = (i & 0x03FFu16) as u32;
        // Check for an infinity or NaN when all exponent bits set
        if half_exp == 0x7C00u32 {
            // Check for signed infinity if mantissa is zero
            if half_man == 0 {
                // SAFETY: u32 -> f32 bit reinterpretation is always valid.
                return unsafe { mem::transmute((half_sign << 16) | 0x7F800000u32) };
            } else {
                // NaN, only 1st mantissa bit is set (canonical quiet NaN; the
                // input's sign and payload are discarded).
                return core::f32::NAN;
            }
        }
        // Calculate single-precision components with adjusted exponent
        let sign = half_sign << 16;
        // Unbias exponent
        let unbiased_exp = ((half_exp as i32) >> 10) - 15;
        // Check for subnormals, which will be normalized by adjusting exponent
        if half_exp == 0 {
            // Calculate how much to adjust the exponent by, from the position
            // of the mantissa's leading set bit (half_man is non-zero here,
            // so leading_zeros is in [6, 15] and e is in [0, 9]).
            let e = (half_man as u16).leading_zeros() - 6;
            // Rebias and adjust exponent
            let exp = (127 - 15 - e) << 23;
            // Shift the mantissa up and mask off the now-implicit leading bit.
            let man = (half_man << (14 + e)) & 0x7F_FF_FFu32;
            // SAFETY: u32 -> f32 bit reinterpretation is always valid.
            return unsafe { mem::transmute(sign | exp | man) };
        }
        // Rebias exponent for a normalized normal
        let exp = ((unbiased_exp + 127) as u32) << 23;
        let man = (half_man & 0x03FFu32) << 13;
        // SAFETY: u32 -> f32 bit reinterpretation is always valid.
        unsafe { mem::transmute(sign | exp | man) }
    }
    pub fn f16_to_f64(i: u16) -> f64 {
        // Check for signed zero
        if i & 0x7FFFu16 == 0 {
            // SAFETY: u64 -> f64 bit reinterpretation is always valid.
            return unsafe { mem::transmute((i as u64) << 48) };
        }
        let half_sign = (i & 0x8000u16) as u64;
        let half_exp = (i & 0x7C00u16) as u64;
        let half_man = (i & 0x03FFu16) as u64;
        // Check for an infinity or NaN when all exponent bits set
        if half_exp == 0x7C00u64 {
            // Check for signed infinity if mantissa is zero
            if half_man == 0 {
                // SAFETY: u64 -> f64 bit reinterpretation is always valid.
                return unsafe { mem::transmute((half_sign << 48) | 0x7FF0000000000000u64) };
            } else {
                // NaN, only 1st mantissa bit is set (canonical quiet NaN; the
                // input's sign and payload are discarded).
                return core::f64::NAN;
            }
        }
        // Calculate double-precision components with adjusted exponent
        let sign = half_sign << 48;
        // Unbias exponent
        let unbiased_exp = ((half_exp as i64) >> 10) - 15;
        // Check for subnormals, which will be normalized by adjusting exponent
        if half_exp == 0 {
            // Calculate how much to adjust the exponent by, from the position
            // of the mantissa's leading set bit.
            let e = (half_man as u16).leading_zeros() - 6;
            // Rebias and adjust exponent
            let exp = ((1023 - 15 - e) as u64) << 52;
            // Shift the mantissa up and mask off the now-implicit leading bit.
            let man = (half_man << (43 + e)) & 0xF_FFFF_FFFF_FFFFu64;
            // SAFETY: u64 -> f64 bit reinterpretation is always valid.
            return unsafe { mem::transmute(sign | exp | man) };
        }
        // Rebias exponent for a normalized normal
        let exp = ((unbiased_exp + 1023) as u64) << 52;
        let man = (half_man & 0x03FFu64) << 42;
        // SAFETY: u64 -> f64 bit reinterpretation is always valid.
        unsafe { mem::transmute(sign | exp | man) }
    }
}
/// Contains utility functions to convert between slices of `u16` bits and `f16` numbers.
pub mod slice {
    use super::f16;
    use core::slice;

    /// Reinterpret a mutable slice of `u16` bits as a mutable slice of `f16` numbers.
    ///
    /// The returned slice has the same lifetime as the original, which prevents
    /// mutating the borrowed `&mut [u16]` argument as long as the returned
    /// `&mut [f16]` is borrowed.
    #[inline]
    pub fn from_bits_mut(bits: &mut [u16]) -> &mut [f16] {
        // Use `as_mut_ptr` rather than casting `as_ptr()` to `*mut`: writing
        // through a mutable pointer derived from a shared borrow is undefined
        // behavior.
        let pointer = bits.as_mut_ptr() as *mut f16;
        let length = bits.len();
        // SAFETY: `f16` is a newtype over `u16` (`f16(u16)`), so size and
        // alignment match; the pointer comes from a valid mutable slice of
        // `length` elements and the result inherits the borrow of `bits`.
        unsafe { slice::from_raw_parts_mut(pointer, length) }
    }

    /// Reinterpret a mutable slice of `f16` numbers as a mutable slice of `u16` bits.
    ///
    /// The returned slice has the same lifetime as the original, which prevents
    /// mutating the borrowed `&mut [f16]` argument as long as the returned
    /// `&mut [u16]` is borrowed.
    #[inline]
    pub fn to_bits_mut(bits: &mut [f16]) -> &mut [u16] {
        // `as_mut_ptr`, not a const-to-mut cast; see `from_bits_mut`.
        let pointer = bits.as_mut_ptr() as *mut u16;
        let length = bits.len();
        // SAFETY: as in `from_bits_mut`; `f16` and `u16` share layout.
        unsafe { slice::from_raw_parts_mut(pointer, length) }
    }

    /// Reinterpret a slice of `u16` bits as a slice of `f16` numbers.
    ///
    /// The returned slice has the same lifetime as the original.
    #[inline]
    pub fn from_bits(bits: &[u16]) -> &[f16] {
        let pointer = bits.as_ptr() as *const f16;
        let length = bits.len();
        // SAFETY: read-only reinterpretation of a valid slice; `f16` is a
        // newtype over `u16`.
        unsafe { slice::from_raw_parts(pointer, length) }
    }

    /// Reinterpret a slice of `f16` numbers as a slice of `u16` bits.
    ///
    /// The returned slice has the same lifetime as the original.
    #[inline]
    pub fn to_bits(bits: &[f16]) -> &[u16] {
        let pointer = bits.as_ptr() as *const u16;
        let length = bits.len();
        // SAFETY: read-only reinterpretation of a valid slice; `f16` is a
        // newtype over `u16`.
        unsafe { slice::from_raw_parts(pointer, length) }
    }
}
/// Contains utility functions to convert between vectors of `u16` bits and `f16` vectors.
///
/// This module is only available with the `std` feature.
#[cfg(feature = "std")]
pub mod vec {
    use super::f16;
    use core::mem;

    /// Converts a vector of `u16` elements into a vector of `f16` elements.
    /// This function merely reinterprets the contents of the vector,
    /// so it's a zero-copy operation.
    #[inline]
    pub fn from_bits(bits: Vec<u16>) -> Vec<f16> {
        let mut raw = bits;
        // The element count and allocation size are unchanged by the
        // reinterpretation, since a struct is laid out as just its members
        // in memory — here the `u16` inside `f16(u16)`.
        let length = raw.len();
        let capacity = raw.capacity();
        let pointer = raw.as_mut_ptr() as *mut f16;
        // The buffer is about to be owned by the new vector, so the old one
        // must not run its destructor (which would free that buffer).
        mem::forget(raw);
        // Rebuild a Vec<f16> over the very same allocation.
        unsafe { Vec::from_raw_parts(pointer, length, capacity) }
    }

    /// Converts a vector of `f16` elements into a vector of `u16` elements.
    /// This function merely reinterprets the contents of the vector,
    /// so it's a zero-copy operation.
    #[inline]
    pub fn to_bits(numbers: Vec<f16>) -> Vec<u16> {
        let mut raw = numbers;
        // Same length/capacity: `f16` is represented as its single `u16`.
        let length = raw.len();
        let capacity = raw.capacity();
        let pointer = raw.as_mut_ptr() as *mut u16;
        // Suppress the destructor of the old Vec<f16>; its buffer now
        // belongs to the Vec<u16> built below.
        mem::forget(raw);
        unsafe { Vec::from_raw_parts(pointer, length, capacity) }
    }
}
#[cfg(test)]
mod test {
    // Unit tests for f16 conversions, constants, comparisons and the
    // slice/vec bit-reinterpretation helpers.
    use super::*;
    use core;
    use core::cmp::Ordering;

    // The f16 constants must round-trip exactly from their f32 values.
    #[test]
    fn test_f16_consts_from_f32() {
        let one = f16::from_f32(1.0);
        let zero = f16::from_f32(0.0);
        let neg_zero = f16::from_f32(-0.0);
        let inf = f16::from_f32(core::f32::INFINITY);
        let neg_inf = f16::from_f32(core::f32::NEG_INFINITY);
        let nan = f16::from_f32(core::f32::NAN);
        assert_eq!(consts::ONE, one);
        assert_eq!(consts::ZERO, zero);
        assert_eq!(consts::NEG_ZERO, neg_zero);
        assert_eq!(consts::INFINITY, inf);
        assert_eq!(consts::NEG_INFINITY, neg_inf);
        // NaN never compares equal, so check NaN-ness instead.
        assert!(nan.is_nan());
        assert!(consts::NAN.is_nan());
        let e = f16::from_f32(core::f32::consts::E);
        let pi = f16::from_f32(core::f32::consts::PI);
        let frac_1_pi = f16::from_f32(core::f32::consts::FRAC_1_PI);
        let frac_1_sqrt_2 = f16::from_f32(core::f32::consts::FRAC_1_SQRT_2);
        let frac_2_pi = f16::from_f32(core::f32::consts::FRAC_2_PI);
        let frac_2_sqrt_pi = f16::from_f32(core::f32::consts::FRAC_2_SQRT_PI);
        let frac_pi_2 = f16::from_f32(core::f32::consts::FRAC_PI_2);
        let frac_pi_3 = f16::from_f32(core::f32::consts::FRAC_PI_3);
        let frac_pi_4 = f16::from_f32(core::f32::consts::FRAC_PI_4);
        let frac_pi_6 = f16::from_f32(core::f32::consts::FRAC_PI_6);
        let frac_pi_8 = f16::from_f32(core::f32::consts::FRAC_PI_8);
        let ln_10 = f16::from_f32(core::f32::consts::LN_10);
        let ln_2 = f16::from_f32(core::f32::consts::LN_2);
        let log10_e = f16::from_f32(core::f32::consts::LOG10_E);
        let log2_e = f16::from_f32(core::f32::consts::LOG2_E);
        let sqrt_2 = f16::from_f32(core::f32::consts::SQRT_2);
        assert_eq!(consts::E, e);
        assert_eq!(consts::PI, pi);
        assert_eq!(consts::FRAC_1_PI, frac_1_pi);
        assert_eq!(consts::FRAC_1_SQRT_2, frac_1_sqrt_2);
        assert_eq!(consts::FRAC_2_PI, frac_2_pi);
        assert_eq!(consts::FRAC_2_SQRT_PI, frac_2_sqrt_pi);
        assert_eq!(consts::FRAC_PI_2, frac_pi_2);
        assert_eq!(consts::FRAC_PI_3, frac_pi_3);
        assert_eq!(consts::FRAC_PI_4, frac_pi_4);
        assert_eq!(consts::FRAC_PI_6, frac_pi_6);
        assert_eq!(consts::FRAC_PI_8, frac_pi_8);
        assert_eq!(consts::LN_10, ln_10);
        assert_eq!(consts::LN_2, ln_2);
        assert_eq!(consts::LOG10_E, log10_e);
        assert_eq!(consts::LOG2_E, log2_e);
        assert_eq!(consts::SQRT_2, sqrt_2);
    }

    // Same as above but converting from f64 source values.
    #[test]
    fn test_f16_consts_from_f64() {
        let one = f16::from_f64(1.0);
        let zero = f16::from_f64(0.0);
        let neg_zero = f16::from_f64(-0.0);
        let inf = f16::from_f64(core::f64::INFINITY);
        let neg_inf = f16::from_f64(core::f64::NEG_INFINITY);
        let nan = f16::from_f64(core::f64::NAN);
        assert_eq!(consts::ONE, one);
        assert_eq!(consts::ZERO, zero);
        assert_eq!(consts::NEG_ZERO, neg_zero);
        assert_eq!(consts::INFINITY, inf);
        assert_eq!(consts::NEG_INFINITY, neg_inf);
        assert!(nan.is_nan());
        assert!(consts::NAN.is_nan());
        let e = f16::from_f64(core::f64::consts::E);
        let pi = f16::from_f64(core::f64::consts::PI);
        let frac_1_pi = f16::from_f64(core::f64::consts::FRAC_1_PI);
        let frac_1_sqrt_2 = f16::from_f64(core::f64::consts::FRAC_1_SQRT_2);
        let frac_2_pi = f16::from_f64(core::f64::consts::FRAC_2_PI);
        let frac_2_sqrt_pi = f16::from_f64(core::f64::consts::FRAC_2_SQRT_PI);
        let frac_pi_2 = f16::from_f64(core::f64::consts::FRAC_PI_2);
        let frac_pi_3 = f16::from_f64(core::f64::consts::FRAC_PI_3);
        let frac_pi_4 = f16::from_f64(core::f64::consts::FRAC_PI_4);
        let frac_pi_6 = f16::from_f64(core::f64::consts::FRAC_PI_6);
        let frac_pi_8 = f16::from_f64(core::f64::consts::FRAC_PI_8);
        let ln_10 = f16::from_f64(core::f64::consts::LN_10);
        let ln_2 = f16::from_f64(core::f64::consts::LN_2);
        let log10_e = f16::from_f64(core::f64::consts::LOG10_E);
        let log2_e = f16::from_f64(core::f64::consts::LOG2_E);
        let sqrt_2 = f16::from_f64(core::f64::consts::SQRT_2);
        assert_eq!(consts::E, e);
        assert_eq!(consts::PI, pi);
        assert_eq!(consts::FRAC_1_PI, frac_1_pi);
        assert_eq!(consts::FRAC_1_SQRT_2, frac_1_sqrt_2);
        assert_eq!(consts::FRAC_2_PI, frac_2_pi);
        assert_eq!(consts::FRAC_2_SQRT_PI, frac_2_sqrt_pi);
        assert_eq!(consts::FRAC_PI_2, frac_pi_2);
        assert_eq!(consts::FRAC_PI_3, frac_pi_3);
        assert_eq!(consts::FRAC_PI_4, frac_pi_4);
        assert_eq!(consts::FRAC_PI_6, frac_pi_6);
        assert_eq!(consts::FRAC_PI_8, frac_pi_8);
        assert_eq!(consts::LN_10, ln_10);
        assert_eq!(consts::LN_2, ln_2);
        assert_eq!(consts::LOG10_E, log10_e);
        assert_eq!(consts::LOG2_E, log2_e);
        assert_eq!(consts::SQRT_2, sqrt_2);
    }

    // Conversions must keep NaNs NaN and must preserve the sign bit of NaNs.
    #[test]
    fn test_nan_conversion() {
        use core::mem;
        let nan64: f64;
        let neg_nan64: f64;
        let nan32: f32;
        let neg_nan32: f32;
        // Build quiet-NaN-with-payload bit patterns of both signs.
        unsafe {
            nan64 = mem::transmute(0x7ff0_0000_0000_0001u64);
            neg_nan64 = mem::transmute(0xfff0_0000_0000_0001u64);
            nan32 = mem::transmute(0x7f80_0001u32);
            neg_nan32 = mem::transmute(0xff80_0001u32);
        }
        let nan32_from_64 = nan64 as f32;
        let neg_nan32_from_64 = neg_nan64 as f32;
        let nan16_from_64 = f16::from_f64(nan64);
        let neg_nan16_from_64 = f16::from_f64(neg_nan64);
        let nan16_from_32 = f16::from_f32(nan32);
        let neg_nan16_from_32 = f16::from_f32(neg_nan32);
        assert!(nan64.is_nan());
        assert!(neg_nan64.is_nan());
        assert!(nan32.is_nan());
        assert!(neg_nan32.is_nan());
        assert!(nan32_from_64.is_nan());
        assert!(neg_nan32_from_64.is_nan());
        assert!(nan16_from_64.is_nan());
        assert!(neg_nan16_from_64.is_nan());
        assert!(nan16_from_32.is_nan());
        assert!(neg_nan16_from_32.is_nan());
        // Now inspect the raw bits to verify the sign bit survived.
        let sign64 = 1u64 << 63;
        let sign32 = 1u32 << 31;
        let sign16 = 1u16 << 15;
        let nan64_u: u64;
        let neg_nan64_u: u64;
        let nan32_u: u32;
        let neg_nan32_u: u32;
        let nan32_from_64_u: u32;
        let neg_nan32_from_64_u: u32;
        let nan16_from_64_u: u16;
        let neg_nan16_from_64_u: u16;
        let nan16_from_32_u: u16;
        let neg_nan16_from_32_u: u16;
        unsafe {
            nan64_u = mem::transmute(nan64);
            neg_nan64_u = mem::transmute(neg_nan64);
            nan32_u = mem::transmute(nan32);
            neg_nan32_u = mem::transmute(neg_nan32);
            nan32_from_64_u = mem::transmute(nan32_from_64);
            neg_nan32_from_64_u = mem::transmute(neg_nan32_from_64);
            nan16_from_64_u = mem::transmute(nan16_from_64);
            neg_nan16_from_64_u = mem::transmute(neg_nan16_from_64);
            nan16_from_32_u = mem::transmute(nan16_from_32);
            neg_nan16_from_32_u = mem::transmute(neg_nan16_from_32);
        }
        assert_eq!(nan64_u & sign64, 0);
        assert_eq!(neg_nan64_u & sign64, sign64);
        assert_eq!(nan32_u & sign32, 0);
        assert_eq!(neg_nan32_u & sign32, sign32);
        assert_eq!(nan32_from_64_u & sign32, 0);
        assert_eq!(neg_nan32_from_64_u & sign32, sign32);
        assert_eq!(nan16_from_64_u & sign16, 0);
        assert_eq!(neg_nan16_from_64_u & sign16, sign16);
        assert_eq!(nan16_from_32_u & sign16, 0);
        assert_eq!(neg_nan16_from_32_u & sign16, sign16);
    }

    #[test]
    fn test_f16_to_f32() {
        let f = f16::from_f32(7.0);
        assert_eq!(f.to_f32(), 7.0f32);
        // 7.1 is NOT exactly representable in 16-bit, it's rounded
        let f = f16::from_f32(7.1);
        let diff = (f.to_f32() - 7.1f32).abs();
        // diff must be <= 4 * EPSILON, as 7 has two more significant bits than 1
        assert!(diff <= 4.0 * consts::EPSILON.to_f32());
        // Subnormal round-trips: 0x0001 is the smallest positive subnormal.
        assert_eq!(f16::from_bits(0x0000_0001).to_f32(), 2.0f32.powi(-24));
        assert_eq!(f16::from_bits(0x0000_0005).to_f32(), 5.0 * 2.0f32.powi(-24));
        assert_eq!(f16::from_bits(0x0000_0001), f16::from_f32(2.0f32.powi(-24)));
        assert_eq!(
            f16::from_bits(0x0000_0005),
            f16::from_f32(5.0 * 2.0f32.powi(-24))
        );
    }

    #[test]
    fn test_f16_to_f64() {
        let f = f16::from_f64(7.0);
        assert_eq!(f.to_f64(), 7.0f64);
        // 7.1 is NOT exactly representable in 16-bit, it's rounded
        let f = f16::from_f64(7.1);
        let diff = (f.to_f64() - 7.1f64).abs();
        // diff must be <= 4 * EPSILON, as 7 has two more significant bits than 1
        assert!(diff <= 4.0 * consts::EPSILON.to_f64());
        // Subnormal round-trips, as in the f32 test above.
        assert_eq!(f16::from_bits(0x0000_0001).to_f64(), 2.0f64.powi(-24));
        assert_eq!(f16::from_bits(0x0000_0005).to_f64(), 5.0 * 2.0f64.powi(-24));
        assert_eq!(f16::from_bits(0x0000_0001), f16::from_f64(2.0f64.powi(-24)));
        assert_eq!(
            f16::from_bits(0x0000_0005),
            f16::from_f64(5.0 * 2.0f64.powi(-24))
        );
    }

    // IEEE semantics: +0 == -0, and ordering across signed zeros and ones.
    #[test]
    fn test_comparisons() {
        let zero = f16::from_f64(0.0);
        let one = f16::from_f64(1.0);
        let neg_zero = f16::from_f64(-0.0);
        let neg_one = f16::from_f64(-1.0);
        assert_eq!(zero.partial_cmp(&neg_zero), Some(Ordering::Equal));
        assert_eq!(neg_zero.partial_cmp(&zero), Some(Ordering::Equal));
        assert!(zero == neg_zero);
        assert!(neg_zero == zero);
        assert!(!(zero != neg_zero));
        assert!(!(neg_zero != zero));
        assert!(!(zero < neg_zero));
        assert!(!(neg_zero < zero));
        assert!(zero <= neg_zero);
        assert!(neg_zero <= zero);
        assert!(!(zero > neg_zero));
        assert!(!(neg_zero > zero));
        assert!(zero >= neg_zero);
        assert!(neg_zero >= zero);
        assert_eq!(one.partial_cmp(&neg_zero), Some(Ordering::Greater));
        assert_eq!(neg_zero.partial_cmp(&one), Some(Ordering::Less));
        assert!(!(one == neg_zero));
        assert!(!(neg_zero == one));
        assert!(one != neg_zero);
        assert!(neg_zero != one);
        assert!(!(one < neg_zero));
        assert!(neg_zero < one);
        assert!(!(one <= neg_zero));
        assert!(neg_zero <= one);
        assert!(one > neg_zero);
        assert!(!(neg_zero > one));
        assert!(one >= neg_zero);
        assert!(!(neg_zero >= one));
        assert_eq!(one.partial_cmp(&neg_one), Some(Ordering::Greater));
        assert_eq!(neg_one.partial_cmp(&one), Some(Ordering::Less));
        assert!(!(one == neg_one));
        assert!(!(neg_one == one));
        assert!(one != neg_one);
        assert!(neg_one != one);
        assert!(!(one < neg_one));
        assert!(neg_one < one);
        assert!(!(one <= neg_one));
        assert!(neg_one <= one);
        assert!(one > neg_one);
        assert!(!(neg_one > one));
        assert!(one >= neg_one);
        assert!(!(neg_one >= one));
    }

    #[test]
    fn test_slice_conversions() {
        use consts::*;
        let bits = &[
            E.to_bits(),
            PI.to_bits(),
            EPSILON.to_bits(),
            FRAC_1_SQRT_2.to_bits(),
        ];
        let numbers = &[E, PI, EPSILON, FRAC_1_SQRT_2];
        // Convert from bits to numbers
        let from_bits = slice::from_bits(bits);
        assert_slice_contents_eq(from_bits, numbers);
        // Convert from numbers back to bits
        let to_bits = slice::to_bits(from_bits);
        assert_slice_contents_eq(to_bits, bits);
    }

    #[test]
    #[cfg(feature = "std")]
    fn test_vec_conversions() {
        use consts::*;
        let numbers = vec![E, PI, EPSILON, FRAC_1_SQRT_2];
        let bits = vec![
            E.to_bits(),
            PI.to_bits(),
            EPSILON.to_bits(),
            FRAC_1_SQRT_2.to_bits(),
        ];
        let bits_cloned = bits.clone();
        // Convert from bits to numbers
        let from_bits = vec::from_bits(bits);
        assert_slice_contents_eq(&from_bits, &numbers);
        // Convert from numbers back to bits
        let to_bits = vec::to_bits(from_bits);
        assert_slice_contents_eq(&to_bits, &bits_cloned);
    }

    // Helper: assert two slices are equal.
    fn assert_slice_contents_eq<T: PartialEq + core::fmt::Debug>(a: &[T], b: &[T]) {
        // `assert_eq!` on slices already compares element-wise via `PartialEq`.
        assert_eq!(a, b);
        // The explicit loop below is redundant with the assert above, but is
        // kept as belt-and-braces coverage with per-element failure output.
        for (a, b) in a.iter().zip(b.iter()) {
            assert_eq!(a, b);
        }
    }

    // NOTE(review): "mutablility" is a typo for "mutability"; the name is left
    // unchanged in this comment-only pass.
    #[test]
    fn test_mutablility(){
        use consts::*;
        let mut bits_array = [ PI.to_bits() ];
        let bits = &mut bits_array[..];
        { // would not compile without these braces
            // TODO: add automated test to check that it does not compile without braces
            let numbers = slice::from_bits_mut(bits);
            numbers[0] = E;
        }
        assert_eq!(bits, &[ E.to_bits() ]);
        bits[0] = LN_2.to_bits();
        assert_eq!(bits, &[ LN_2.to_bits() ]);
    }
}
|
//! Convert HTML to text formats.
//!
//! This crate renders HTML into a text format, wrapped to a specified width.
//! This can either be plain text or with extra annotations to (for example)
//! show in a terminal which supports colours.
//!
//! # Examples
//!
//! ```rust
//! # use html2text::from_read;
//! let html = b"
//! <ul>
//! <li>Item one</li>
//! <li>Item two</li>
//! <li>Item three</li>
//! </ul>";
//! assert_eq!(from_read(&html[..], 20),
//! "\
//! * Item one
//! * Item two
//! * Item three
//! ");
//! ```
//! A couple of simple demonstration programs are included as examples:
//!
//! ### html2text
//!
//! The simplest example uses `from_read` to convert HTML on stdin into plain
//! text:
//!
//! ```sh
//! $ cargo run --example html2text < foo.html
//! [...]
//! ```
//!
//! ### html2term
//!
//! A very simple example of using the rich interface (`from_read_rich`) for a
//! slightly interactive console HTML viewer is provided as `html2term`.
//!
//! ```sh
//! $ cargo run --example html2term foo.html
//! [...]
//! ```
//!
//! Note that this example takes the HTML file as a parameter so that it can
//! read keys from stdin.
//!
#![cfg_attr(feature="clippy", feature(plugin))]
#![cfg_attr(feature="clippy", plugin(clippy))]
#![deny(missing_docs)]
extern crate html5ever_atoms;
#[macro_use] extern crate html5ever;
extern crate unicode_width;
#[macro_use]
mod macros;
pub mod render;
use render::Renderer;
use render::text_renderer::{TextRenderer,
TextDecorator,PlainDecorator,RichDecorator,
RichAnnotation,TaggedLine,RenderLine};
use std::io;
use std::io::Write;
use std::cmp::max;
use std::iter::{once,repeat};
use std::ops::{Deref,DerefMut};
use html5ever::{parse_document};
use html5ever::driver::ParseOpts;
use html5ever::tree_builder::TreeBuilderOpts;
use html5ever::rcdom::{self,RcDom,Handle,NodeData::{Text,Element,Document,Comment}};
use html5ever::tendril::TendrilSink;
/// A dummy writer which does nothing
struct Discard {}

impl Write for Discard {
    /// Claim the whole buffer was written, without storing it anywhere.
    fn write(&mut self, bytes: &[u8]) -> io::Result<usize> {
        Ok(bytes.len())
    }

    /// Flushing a sink that keeps nothing is trivially successful.
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}
/// Recursively concatenate all the text content beneath a DOM node.
fn get_text(handle: Handle) -> String {
    let node = &*handle;
    let mut text = String::new();
    match node.data {
        // A text node contributes its contents directly.
        Text { contents: ref tstr } => text.push_str(&tstr.borrow()),
        // Any other node contributes the text of all its children, in order.
        _ => {
            for child in node.children.borrow().iter() {
                text.push_str(&get_text(child.clone()));
            }
        }
    }
    text
}
/// Minimum width (in characters) assumed for any non-empty text when
/// estimating the narrowest possible rendering of a node.
const MIN_WIDTH: usize = 5;
/// Size information/estimate
///
/// A rough guess at how much space a render node will need, used when
/// apportioning width between table columns.
#[derive(Debug, Copy, Clone, Default)]
pub struct SizeEstimate {
    size: usize,      // Rough overall size (~characters)
    min_width: usize, // The narrowest possible rendering
}

// Note: `Default` is derived above (both fields default to 0), replacing the
// previous hand-written impl which produced the identical value.

impl SizeEstimate {
    /// Combine two estimates into one (add size and widest required)
    pub fn add(self, other: SizeEstimate) -> SizeEstimate {
        SizeEstimate {
            // Sizes accumulate; minimum widths don't — the wider of the two
            // constraints governs.
            size: self.size + other.size,
            min_width: max(self.min_width, other.min_width),
        }
    }
}
#[derive(Debug)]
/// Render tree table cell
pub struct RenderTableCell {
    colspan: usize,                      // Number of table columns this cell spans
    content: Vec<RenderNode>,            // Render nodes making up the cell contents
    size_estimate: Option<SizeEstimate>, // Cached size estimate, filled in lazily
    col_width: Option<usize>,            // Actual width to use
}

impl RenderTableCell {
    /// Render this cell to a builder.
    pub fn render<T: Write, R: Renderer>(&mut self, _builder: &mut R, _err_out: &mut T) {
        // Not implemented yet; see the commented-out call below for the
        // intended behaviour.
        unimplemented!()
        //render_tree_children_to_string(builder, &mut self.content, err_out)
    }

    /// Calculate or return the estimate size of the cell
    pub fn get_size_estimate(&mut self) -> SizeEstimate {
        // Lazily compute and cache: combine the estimates of all child nodes.
        if self.size_estimate.is_none() {
            let size = self.content
                .iter_mut()
                .map(|node| node.get_size_estimate())
                .fold(Default::default(), SizeEstimate::add);
            self.size_estimate = Some(size);
        }
        self.size_estimate.unwrap()
    }
}
#[derive(Debug)]
/// Render tree table row
pub struct RenderTableRow {
    cells: Vec<RenderTableCell>,   // The cells of this row, left to right
    col_sizes: Option<Vec<usize>>, // Column widths; set once the table has been sized
}
impl RenderTableRow {
    /// Return a mutable iterator over the cells.
    pub fn cells(&mut self) -> std::slice::IterMut<RenderTableCell> {
        self.cells.iter_mut()
    }

    /// Count the number of cells in the row.
    /// Takes into account colspan.
    pub fn num_cells(&self) -> usize {
        self.cells.iter().map(|cell| cell.colspan).sum()
    }

    /// Return an iterator over (column, &cell)s, which
    /// takes into account colspan.
    pub fn cell_columns(&mut self) -> Vec<(usize, &mut RenderTableCell)> {
        let mut columns = Vec::with_capacity(self.cells.len());
        let mut column = 0;
        for cell in self.cells.iter_mut() {
            let span = cell.colspan;
            columns.push((column, cell));
            column += span;
        }
        columns
    }

    /// Return the contained cells as RenderNodes, annotated with their
    /// widths if available. Skips cells with no width allocated.
    pub fn into_cells(self) -> Vec<RenderNode> {
        let widths = self.col_sizes.unwrap();
        let mut nodes = Vec::new();
        let mut column = 0;
        for mut cell in self.cells {
            let span = cell.colspan;
            let width: usize = widths[column..column + span].iter().sum();
            // A cell allocated at most one column of width is dropped; one
            // column of the allocation is reserved for the separator.
            if width > 1 {
                cell.col_width = Some(width - 1);
                nodes.push(RenderNode::new(RenderNodeInfo::TableCell(cell)));
            }
            column += span;
        }
        nodes
    }
}
#[derive(Debug)]
/// A representation of a table render tree with metadata.
pub struct RenderTable {
    rows: Vec<RenderTableRow>,           // The rows making up the table
    num_columns: usize,                  // Column count of the widest row (colspan included)
    size_estimate: Option<SizeEstimate>, // Cached size estimate, filled in lazily
}
impl RenderTable {
    /// Create a new RenderTable with the given rows
    pub fn new(rows: Vec<RenderTableRow>) -> RenderTable {
        // The table is as wide as its widest row (taking colspan into account).
        let num_columns = rows.iter()
            .map(|r| r.num_cells()).max().unwrap_or(0);
        RenderTable {
            rows: rows,
            num_columns: num_columns,
            size_estimate: None,
        }
    }

    /// Return an iterator over the rows.
    pub fn rows(&mut self) -> std::slice::IterMut<RenderTableRow> {
        self.rows.iter_mut()
    }

    /// Consume this and return a Vec<RenderNode> containing the children;
    /// the children know the column sizes required.
    pub fn into_rows(self, col_sizes: Vec<usize>) -> Vec<RenderNode> {
        self.rows
            .into_iter()
            .map(|mut tr| {
                // Each row gets its own copy of the column widths.
                tr.col_sizes = Some(col_sizes.clone());
                RenderNode::new(RenderNodeInfo::TableRow(tr))
            })
            .collect()
    }

    /// Estimate the table size by apportioning each cell's estimate across
    /// the columns it spans, then combining the per-column results.
    fn calc_size_estimate(&mut self) {
        // An empty table takes no space.
        if self.num_columns == 0 {
            self.size_estimate = Some(SizeEstimate { size: 0, min_width: 0 });
            return;
        }
        let mut sizes: Vec<SizeEstimate> = vec![Default::default(); self.num_columns];
        // For now, a simple estimate based on adding up sub-parts.
        for row in self.rows() {
            let mut colno = 0usize;
            for cell in row.cells() {
                let cellsize = cell.get_size_estimate();
                for colnum in 0..cell.colspan {
                    // Share the cell's size evenly across the spanned columns.
                    sizes[colno + colnum].size += cellsize.size / cell.colspan;
                    // NOTE(review): this divides the column's *accumulated*
                    // min_width by the colspan, which is asymmetric with the
                    // `size` line above; dividing `cellsize.min_width` may
                    // have been intended — confirm before changing.
                    sizes[colno + colnum].min_width = max(sizes[colno+colnum].min_width/cell.colspan, cellsize.min_width);
                }
                colno += cell.colspan;
            }
        }
        let size = sizes.iter().map(|s| s.size).sum(); // Include borders?
        // One separator column is added between each pair of adjacent columns.
        let min_width = sizes.iter().map(|s| s.min_width).sum::<usize>() + self.num_columns-1;
        self.size_estimate = Some(SizeEstimate { size: size, min_width: min_width });
    }

    /// Calculate and store (or return stored value) of estimated size
    pub fn get_size_estimate(&mut self) -> SizeEstimate {
        if self.size_estimate.is_none() {
            self.calc_size_estimate();
        }
        self.size_estimate.unwrap()
    }
}
/// The node-specific information distilled from the DOM.
#[derive(Debug)]
pub enum RenderNodeInfo {
    /// Some text.
    Text(String),
    /// A group of nodes collected together.
    Container(Vec<RenderNode>),
    /// A link with contained nodes
    Link(String, Vec<RenderNode>),
    /// An emphasised region
    Em(Vec<RenderNode>),
    /// A strong region
    Strong(Vec<RenderNode>),
    /// A code region
    Code(Vec<RenderNode>),
    /// An image (title)
    Img(String),
    /// A block element with children
    Block(Vec<RenderNode>),
    /// A header (h1, h2, ...) with children
    Header(usize, Vec<RenderNode>),
    /// A Div element with children
    Div(Vec<RenderNode>),
    /// A preformatted region.
    Pre(String),
    /// A blockquote
    BlockQuote(Vec<RenderNode>),
    /// An unordered list
    Ul(Vec<RenderNode>),
    /// An ordered list
    Ol(i64, Vec<RenderNode>),
    /// A line break
    Break,
    /// A table
    Table(RenderTable),
    /// A set of table rows (from either <thead> or <tbody>)
    TableBody(Vec<RenderTableRow>),
    /// Table row (must only appear within a table body)
    TableRow(RenderTableRow),
    /// Table cell (must only appear within a table row)
    TableCell(RenderTableCell),
    /// Start of a named HTML fragment
    FragStart(String),
}
/// Common fields from a node.
#[derive(Debug)]
pub struct RenderNode {
    size_estimate: Option<SizeEstimate>, // Cached size estimate, filled in lazily
    info: RenderNodeInfo,                // The node-specific payload
}
impl RenderNode {
    /// Create a node from the RenderNodeInfo.
    pub fn new(info: RenderNodeInfo) -> RenderNode {
        RenderNode {
            size_estimate: None,
            info: info,
        }
    }

    /// Get a size estimate (~characters)
    pub fn get_size_estimate(&mut self) -> SizeEstimate {
        // If it's already calculated, then just return the answer.
        if let Some(s) = self.size_estimate {
            return s;
        };
        use RenderNodeInfo::*;
        // Otherwise, make an estimate.
        let estimate = match self.info {
            // Text-like leaves: size is the trimmed text length, and any
            // non-empty text needs at least MIN_WIDTH columns.
            Text(ref t) |
            Img(ref t) |
            Pre(ref t) => {
                let len = t.trim().len();
                SizeEstimate {
                    size: len,
                    min_width: if len > 0 { MIN_WIDTH } else { 0 },
                }
            },
            // Simple containers: combine the estimates of all children.
            Container(ref mut v) |
            Link(_, ref mut v) |
            Em(ref mut v) |
            Strong(ref mut v) |
            Code(ref mut v) |
            Block(ref mut v) |
            Div(ref mut v) |
            BlockQuote(ref mut v) |
            Ul(ref mut v) |
            Ol(_, ref mut v) => {
                v.iter_mut()
                    .map(RenderNode::get_size_estimate)
                    .fold(Default::default(), SizeEstimate::add)
            },
            // Headers reserve extra minimum width on top of their children.
            Header(level, ref mut v) => {
                v.iter_mut()
                    .map(RenderNode::get_size_estimate)
                    .fold(Default::default(), SizeEstimate::add).add(SizeEstimate {size:0, min_width: MIN_WIDTH+level+2})
            },
            Break => SizeEstimate { size: 1, min_width: 1 },
            // Tables know how to estimate themselves (and cache internally).
            Table(ref mut t) => {
                t.get_size_estimate()
            },
            // Table internals are only ever sized via their containing Table.
            TableRow(_)|TableBody(_)|TableCell(_) => {
                unimplemented!()
            },
            FragStart(_) => Default::default(),
        };
        // Cache the answer for next time.
        self.size_estimate = Some(estimate);
        estimate
    }
}
/// Make a Vec of RenderNodes from the children of a node.
fn children_to_render_nodes<T: Write>(handle: Handle, err_out: &mut T) -> Vec<RenderNode> {
    // Process each child in turn, silently dropping any which convert to
    // nothing; the node itself contributes no output here.
    handle
        .children
        .borrow()
        .iter()
        .flat_map(|child| dom_to_render_tree(child.clone(), err_out))
        .collect()
}
/// Make a Vec of RenderNodes from the <li>children of a node.
fn list_children_to_render_nodes<T: Write>(handle: Handle, err_out: &mut T) -> Vec<RenderNode> {
    let mut items = Vec::new();
    for child in handle.children.borrow().iter() {
        match child.data {
            Element { ref name, .. } => match name.expanded() {
                // Each <li> becomes a Block of its converted children.
                expanded_name!(html "li") => {
                    let contents = children_to_render_nodes(child.clone(), err_out);
                    items.push(RenderNode::new(RenderNodeInfo::Block(contents)));
                }
                // Non-<li> elements inside a list are ignored.
                _ => {}
            },
            // Comments are silently dropped.
            Comment { .. } => {}
            _ => {
                html_trace!("Unhandled in list: {:?}\n", child);
            }
        }
    }
    items
}
/// Convert a table into a RenderNode
fn table_to_render_tree<T: Write>(handle: Handle, _err_out: &mut T) -> TreeMapResult<(), Handle, RenderNode> {
    pending(handle, |_, rowset| {
        // Flatten all the row groups (thead/tbody results) into one row list.
        let mut rows = Vec::new();
        for bodynode in rowset {
            match bodynode.info {
                RenderNodeInfo::TableBody(body) => rows.extend(body),
                _ => {
                    html_trace!("Found in table: {:?}", bodynode.info);
                }
            }
        }
        Some(RenderNode::new(RenderNodeInfo::Table(RenderTable::new(rows))))
    })
}
/// Add rows from a thead or tbody.
fn tbody_to_render_tree<T: Write>(handle: Handle, _err_out: &mut T) -> TreeMapResult<(), Handle, RenderNode> {
    pending(handle, |_, rowchildren| {
        // Keep only the children which actually became table rows.
        let mut rows = Vec::new();
        for rownode in rowchildren {
            if let RenderNodeInfo::TableRow(row) = rownode.info {
                rows.push(row);
            } else {
                html_trace!(" [[tbody child: {:?}]]", rownode);
            }
        }
        Some(RenderNode::new(RenderNodeInfo::TableBody(rows)))
    })
}
/// Convert a table row to a RenderTableRow
fn tr_to_render_tree<T: Write>(handle: Handle, _err_out: &mut T) -> TreeMapResult<(), Handle, RenderNode> {
    pending(handle, |_, cellnodes| {
        // Keep only the children which actually became table cells.
        let mut cells = Vec::new();
        for cellnode in cellnodes {
            if let RenderNodeInfo::TableCell(cell) = cellnode.info {
                cells.push(cell);
            } else {
                html_trace!(" [[tr child: {:?}]]", cellnode);
            }
        }
        // Column sizes are filled in later, when the whole table is sized.
        Some(RenderNode::new(RenderNodeInfo::TableRow(RenderTableRow {
            cells,
            col_sizes: None,
        })))
    })
}
/// Convert a single table cell to a render node.
fn td_to_render_tree<T: Write>(handle: Handle, _err_out: &mut T) -> TreeMapResult<(), Handle, RenderNode> {
    // Read the colspan attribute up front (defaulting to 1 if absent or
    // unparseable) so the closure below can own the value.
    let mut colspan = 1;
    if let Element { ref attrs, .. } = handle.data {
        for attr in attrs.borrow().iter() {
            if &attr.name.local == "colspan" {
                let value: &str = &*attr.value;
                colspan = value.parse().unwrap_or(1);
            }
        }
    }
    pending(handle, move |_, children| {
        Some(RenderNode::new(RenderNodeInfo::TableCell(RenderTableCell {
            colspan,
            content: children,
            size_estimate: None,
            col_width: None,
        })))
    })
}
/// A reducer which combines results from mapping children into
/// the result for the current node. Takes a context and a
/// vector of results and returns a new result (or nothing).
type ResultReducer<C, R> = dyn Fn(&mut C, Vec<R>) -> Option<R>;

/// A closure to call before processing a child node.
type ChildPreFn<C, N> = dyn Fn(&mut C, &N);

/// A closure to call after processing a child node,
/// before adding the result to the processed results
/// vector.
type ChildPostFn<C, R> = dyn Fn(&mut C, &R);

/// The result of trying to render one node.
enum TreeMapResult<C, N, R> {
    /// A completed result.
    Finished(R),
    /// Deferred completion - can be turned into a result
    /// once the vector of children are processed.
    PendingChildren {
        /// The child nodes still to be processed.
        children: Vec<N>,
        /// Combines the children's results into this node's result.
        cons: Box<ResultReducer<C, R>>,
        /// Optional hook run before each child is processed.
        prefn: Option<Box<ChildPreFn<C, N>>>,
        /// Optional hook run after each child is processed.
        postfn: Option<Box<ChildPostFn<C, R>>>,
    },
    /// Nothing (e.g. a comment or other ignored element).
    Nothing
}
/// Walk a tree iteratively (using an explicit stack, so unbounded depth is
/// safe), processing each node with `process_node` and reducing the per-node
/// results bottom-up into a single value for `top`.
///
/// `process_node` may finish a node immediately, defer it until its children
/// have been processed (`PendingChildren`), or discard it (`Nothing`).
/// Returns the reduced result for `top`, or `None` if it reduced to nothing.
fn tree_map_reduce<C, N, R, M>(context: &mut C,
                            top: N,
                            mut process_node: M) -> Option<R>
    where M: FnMut(&mut C, N) -> TreeMapResult<C, N, R>,
{
    /// A node partially decoded, waiting for its children to
    /// be processed.
    struct PendingNode<C, R, N> {
        /// How to make the node once finished
        construct: Box<ResultReducer<C, R>>,
        /// Called before processing each child
        prefn: Option<Box<ChildPreFn<C, N>>>,
        /// Called after processing each child
        postfn: Option<Box<ChildPostFn<C, R>>>,
        /// Children already processed
        children: Vec<R>,
        /// Iterator of child nodes not yet processed
        to_process: std::vec::IntoIter<N>,
    }
    // Seed the stack with a synthetic root whose only child is `top`; its
    // reducer just returns that single child's result.
    let mut pending_stack = vec![
        PendingNode {
            // We only expect one child, which we'll just return.
            construct: Box::new(|_, mut cs| cs.pop()),
            prefn: None,
            postfn: None,
            children: Vec::new(),
            to_process: vec![top].into_iter(),
        }
    ];
    loop {
        // Get the next child node to process
        let next_node = pending_stack.last_mut()
            .unwrap()
            .to_process
            .next();
        if let Some(h) = next_node {
            // Run the pre-child hook, if any.  (Using `if let` rather than
            // `Option::map` for a side effect keeps intent clear and avoids
            // clippy's option_map_unit_fn lint.)
            if let Some(ref f) = pending_stack.last_mut().unwrap().prefn {
                f(context, &h);
            }
            match process_node(context, h) {
                TreeMapResult::Finished(result) => {
                    if let Some(ref f) = pending_stack.last_mut().unwrap().postfn {
                        f(context, &result);
                    }
                    pending_stack.last_mut().unwrap().children.push(result);
                }
                TreeMapResult::PendingChildren { children, cons, prefn, postfn } => {
                    // Descend: this node waits on the stack until all its
                    // children have been processed.
                    pending_stack.push(PendingNode {
                        construct: cons,
                        prefn,
                        postfn,
                        children: Vec::new(),
                        to_process: children.into_iter(),
                    });
                },
                TreeMapResult::Nothing => {},
            };
        } else {
            // No more children, so finally construct the parent.
            let completed = pending_stack.pop().unwrap();
            let reduced = (completed.construct)(context, completed.children);
            if let Some(node) = reduced {
                if let Some(parent) = pending_stack.last_mut() {
                    if let Some(ref f) = parent.postfn {
                        f(context, &node);
                    }
                    parent.children.push(node);
                } else {
                    // Finished the whole stack!
                    break Some(node);
                }
            } else {
                // This node reduced to nothing; if it was the root, the
                // whole walk produces nothing.
                if pending_stack.is_empty() {
                    break None;
                }
            }
        }
    }
}
/// Convert a DOM tree or subtree into a render tree.
///
/// Returns `None` if the subtree reduces to nothing renderable.
pub fn dom_to_render_tree<T:Write>(handle: Handle, err_out: &mut T) -> Option<RenderNode> {
    html_trace!("### dom_to_render_tree: HTML: {:?}", handle);
    // Drive the generic tree walker with the DOM-specific node processor.
    let result = tree_map_reduce(&mut (), handle,
        |_, handle| process_dom_node(handle, err_out),
    );
    html_trace!("### dom_to_render_tree: out= {:#?}", result);
    result
}
/// Build a `PendingChildren` result which processes all of `handle`'s DOM
/// children and then combines their render nodes with `f`.
fn pending<F>(handle: Handle, f: F) -> TreeMapResult<(), Handle, RenderNode>
    where //for<'a> F: Fn(&'a mut C, Vec<RenderNode>) -> Option<RenderNode>+'static
          for<'r> F: Fn(&'r mut (), std::vec::Vec<RenderNode>) -> Option<RenderNode>+'static
{
    TreeMapResult::PendingChildren {
        // Cloning the child handles is cheap: they are reference-counted.
        children: handle.children.borrow().clone(),
        cons: Box::new(f),
        prefn: None,
        postfn: None
    }
}
/// Prepend a FragmentStart (or analogous) marker to an existing
/// RenderNode.
fn prepend_marker(prefix: RenderNode, mut orig: RenderNode) -> RenderNode {
    use RenderNodeInfo::*;
    match orig.info {
        // For block elements such as Block and Div, we need to insert
        // the node at the front of their children array, otherwise
        // the renderer is liable to drop the fragment start marker
        // _before_ the new line indicating the end of the previous
        // paragraph.
        //
        // For Container, we do the same thing just to make the data
        // less pointlessly nested.
        Block(ref mut children) |
        Div(ref mut children) |
        BlockQuote(ref mut children) |
        Container(ref mut children) |
        TableCell(RenderTableCell { content: ref mut children, .. }) => {
            children.insert(0, prefix);
            // Now return orig, but we do that outside the match so
            // that we've given back the borrowed ref 'children'.
        },
        // For table rows and tables, push down if there's any content.
        TableRow(ref mut rrow) => {
            // If the row is empty, then there isn't really anything
            // to attach the fragment start to.
            if rrow.cells.len() > 0 {
                rrow.cells[0].content.insert(0, prefix);
            }
        },
        Table(ref mut rtable) => {
            // If the table is empty, then there isn't really anything
            // to attach the fragment start to.
            if rtable.rows.len() > 0 {
                let rrow = &mut rtable.rows[0];
                if rrow.cells.len() > 0 {
                    rrow.cells[0].content.insert(0, prefix);
                }
            }
        },
        // For anything else, just make a new Container with the
        // prefix node and the original one.
        _ => {
            return RenderNode::new(Container(vec![prefix, orig]));
        },
    }
    orig
}
/// Convert a single DOM node into a `RenderNode`, returning a
/// `TreeMapResult` which either finishes immediately, produces nothing,
/// or asks the tree walker to convert the DOM children first.
fn process_dom_node<T:Write>(handle: Handle, err_out: &mut T) -> TreeMapResult<(), Handle, RenderNode> {
    use TreeMapResult::*;
    use RenderNodeInfo::*;
    match handle.clone().data {
        Document => pending(handle, |&mut (), cs| Some(RenderNode::new(Container(cs)))),
        Comment { .. } => Nothing,
        Element { ref name, ref attrs, .. } => {
            // Whether a name="..." attribute (in addition to id="...")
            // can mark a fragment start for this element (true for <a>).
            let mut frag_from_name_attr = false;
            let result = match name.expanded() {
                expanded_name!(html "html") |
                expanded_name!(html "span") |
                expanded_name!(html "body") => {
                    /* process children, but don't add anything */
                    pending(handle, |_,cs| Some(RenderNode::new(Container(cs))))
                },
                expanded_name!(html "link") |
                expanded_name!(html "meta") |
                expanded_name!(html "hr") |
                expanded_name!(html "script") |
                expanded_name!(html "style") |
                expanded_name!(html "head") => {
                    /* Ignore the head and its children */
                    Nothing
                },
                expanded_name!(html "a") => {
                    let borrowed = attrs.borrow();
                    let mut target = None;
                    frag_from_name_attr = true;
                    // Find the first href attribute, if any.
                    for attr in borrowed.iter() {
                        if &attr.name.local == "href" {
                            target = Some(&*attr.value);
                            break;
                        }
                    }
                    PendingChildren{
                        children: handle.children.borrow().clone(),
                        cons: if let Some(href) = target {
                            // We need the closure to own the string it's going to use.
                            // Unfortunately that means we ideally want FnOnce; but
                            // that doesn't yet work in a Box. Box<FnBox()> does, but
                            // is unstable. So we'll just move a string in and clone
                            // it on use.
                            let href: String = href.into();
                            Box::new(move |_, cs| Some(RenderNode::new(Link(href.clone(), cs))))
                        } else {
                            // No href: render the contents as a plain container.
                            Box::new(|_, cs| Some(RenderNode::new(Container(cs))))
                        },
                        prefn: None, postfn: None,
                    }
                },
                expanded_name!(html "em") => {
                    pending(handle, |_, cs| Some(RenderNode::new(Em(cs))))
                },
                expanded_name!(html "strong") => {
                    pending(handle, |_, cs| Some(RenderNode::new(Strong(cs))))
                },
                expanded_name!(html "code") => {
                    pending(handle, |_, cs| Some(RenderNode::new(Code(cs))))
                },
                expanded_name!(html "img") => {
                    let borrowed = attrs.borrow();
                    let mut title = None;
                    // Use the alt text as the image's rendered title.
                    for attr in borrowed.iter() {
                        if &attr.name.local == "alt" {
                            title = Some(&*attr.value);
                            break;
                        }
                    }
                    if let Some(title) = title {
                        Finished(RenderNode::new(Img(title.into())))
                    } else {
                        // Without alt text there is nothing to render.
                        Nothing
                    }
                },
                expanded_name!(html "h1") |
                expanded_name!(html "h2") |
                expanded_name!(html "h3") |
                expanded_name!(html "h4") => {
                    // The header level is the digit in the tag name
                    // ("h1".."h4"), so this parse cannot fail here.
                    let level: usize = name.local[1..].parse().unwrap();
                    pending(handle, move |_, cs| Some(RenderNode::new(Header(level, cs))))
                },
                expanded_name!(html "p") => {
                    pending(handle, |_, cs| Some(RenderNode::new(Block(cs))))
                },
                expanded_name!(html "div") => {
                    pending(handle, |_, cs| Some(RenderNode::new(Div(cs))))
                },
                expanded_name!(html "pre") => {
                    // Preformatted text is flattened to a single string.
                    Finished(RenderNode::new(Pre(get_text(handle))))
                },
                expanded_name!(html "br") => {
                    Finished(RenderNode::new(Break))
                }
                expanded_name!(html "table") => {
                    table_to_render_tree(handle.clone(), err_out)
                },
                expanded_name!(html "thead") |
                expanded_name!(html "tbody") => {
                    tbody_to_render_tree(handle.clone(), err_out)
                },
                expanded_name!(html "tr") => {
                    tr_to_render_tree(handle.clone(), err_out)
                },
                expanded_name!(html "th") |
                expanded_name!(html "td") => {
                    td_to_render_tree(handle.clone(), err_out)
                }
                expanded_name!(html "blockquote") => {
                    pending(handle, |_, cs| Some(RenderNode::new(BlockQuote(cs))))
                },
                expanded_name!(html "ul") => {
                    Finished(RenderNode::new(Ul(list_children_to_render_nodes(handle.clone(), err_out))))
                },
                expanded_name!(html "ol") => {
                    let borrowed = attrs.borrow();
                    // <ol start="..."> overrides the default numbering start.
                    let mut start = 1;
                    for attr in borrowed.iter() {
                        if &attr.name.local == "start" {
                            start = attr.value.parse().ok().unwrap_or(1);
                            break;
                        }
                    }
                    Finished(RenderNode::new(Ol(start, list_children_to_render_nodes(handle.clone(), err_out))))
                },
                _ => {
                    // Unknown elements are rendered as transparent containers.
                    html_trace!("Unhandled element: {:?}\n", name.local);
                    pending(handle, |_, cs| Some(RenderNode::new(Container(cs))))
                    //None
                },
            };
            // Any element with an id attribute (or name, for <a>) becomes a
            // fragment target; record a FragStart marker for it.
            let mut fragment = None;
            let borrowed = attrs.borrow();
            for attr in borrowed.iter() {
                if &attr.name.local == "id" ||
                    (frag_from_name_attr && &attr.name.local == "name")
                {
                    fragment = Some(attr.value.to_string());
                    break;
                }
            }
            if let Some(fragname) = fragment {
                match result {
                    Finished(node) => Finished(prepend_marker(RenderNode::new(FragStart(fragname)), node)),
                    Nothing => Finished(RenderNode::new(FragStart(fragname))),
                    PendingChildren{children, cons, prefn, postfn} => {
                        let fragname: String = fragname.into();
                        // Wrap the original constructor so the marker is
                        // prepended once the children have been built.
                        PendingChildren {
                            children: children,
                            prefn: prefn,
                            postfn: postfn,
                            cons: Box::new(move |ctx,ch| {
                                let fragnode = RenderNode::new(FragStart(fragname.clone()));
                                match cons(ctx,ch) {
                                    None => Some(fragnode),
                                    Some(node) => Some(prepend_marker(fragnode, node)),
                                }
                            }),
                        }
                    },
                }
            } else {
                result
            }
        },
        rcdom::NodeData::Text { contents: ref tstr } => {
            Finished(RenderNode::new(Text((&*tstr.borrow()).into())))
        }
        _ => {
            // NodeData doesn't have a Debug impl.
            write!(err_out, "Unhandled node type.\n").unwrap();
            Nothing
        },
    }
}
/// Context to use during tree parsing.
/// This mainly gives access to a Renderer, but needs to be able to push
/// new ones on for nested structures.
struct BuilderStack<R:Renderer> {
    // Stack of renderers; the last element is the innermost/current one
    // (the Deref/DerefMut impls delegate to `last()`).
    builders: Vec<R>,
}
impl<R:Renderer> BuilderStack<R> {
    /// Create a stack containing just the given builder.
    pub fn new(builder: R) -> BuilderStack<R> {
        let builders = vec![builder];
        BuilderStack { builders }
    }
    /// Push a new builder onto the stack
    pub fn push(&mut self, builder: R) {
        self.builders.push(builder);
    }
    /// Pop off the top builder and return it.
    /// Panics if empty
    pub fn pop(&mut self) -> R {
        let top = self.builders.pop();
        top.unwrap()
    }
    /// Pop off the only builder and return it.
    /// panics if there aren't exactly 1 available.
    pub fn into_inner(mut self) -> R {
        assert_eq!(self.builders.len(), 1);
        let only = self.builders.pop();
        only.unwrap()
    }
}
// Delegate renderer calls to the innermost (most recently pushed)
// builder, so operations always target the current nesting level.
impl<R:Renderer> Deref for BuilderStack<R> {
    type Target = R;
    fn deref(&self) -> &R {
        self.builders.last().expect("Underflow in BuilderStack")
    }
}
// Mutable counterpart of the Deref impl above: delegate to the
// innermost builder on the stack.
impl<R:Renderer> DerefMut for BuilderStack<R> {
    fn deref_mut(&mut self) -> &mut R {
        self.builders.last_mut().expect("Underflow in BuilderStack")
    }
}
/// Render a RenderNode tree into `builder`, returning the single
/// remaining renderer containing the output.
fn render_tree_to_string<T:Write, R:Renderer>(builder: R, tree: RenderNode,
                                              err_out: &mut T) -> R {
    let mut stack = BuilderStack::new(builder);
    tree_map_reduce(&mut stack, tree, |builders, node| {
        do_render_node(builders, node, err_out)
    });
    stack.into_inner()
}
/// Like `pending`, but for the rendering pass: defer `f` until the
/// node's children have been rendered into the builder stack.
fn pending2<R: Renderer, F: Fn(&mut BuilderStack<R>, Vec<Option<R>>) -> Option<Option<R>> + 'static>(children: Vec<RenderNode>, f: F) -> TreeMapResult<BuilderStack<R>, RenderNode, Option<R>> {
    TreeMapResult::PendingChildren {
        children,
        prefn: None,
        postfn: None,
        cons: Box::new(f),
    }
}
/// Render a single RenderNode into the builder stack, returning either a
/// finished result or a PendingChildren record for the tree walker.
///
/// (The previous signature declared lifetimes `<'a, 'b>`; `'a` was never
/// used and `'b` is fully handled by elision, so both are removed.)
fn do_render_node<T: Write, R: Renderer>(builder: &mut BuilderStack<R>,
                                         tree: RenderNode,
                                         err_out: &mut T)
                                         -> TreeMapResult<BuilderStack<R>, RenderNode, Option<R>>
{
    use TreeMapResult::*;
    use RenderNodeInfo::*;
    match tree.info {
        Text(ref tstr) => {
            builder.add_inline_text(tstr);
            Finished(None)
        },
        Container(children) => {
            pending2(children, |_, _| Some(None))
        },
        Link(href, children) => {
            builder.start_link(&href);
            pending2(children, |builder:&mut BuilderStack<R>, _| {
                builder.end_link();
                Some(None)
            })
        },
        Em(children) => {
            builder.start_emphasis();
            pending2(children, |builder:&mut BuilderStack<R>, _| {
                builder.end_emphasis();
                Some(None)
            })
        },
        Strong(children) => {
            builder.start_strong();
            pending2(children, |builder:&mut BuilderStack<R>, _| {
                builder.end_strong();
                Some(None)
            })
        },
        Code(children) => {
            builder.start_code();
            pending2(children, |builder:&mut BuilderStack<R>, _| {
                builder.end_code();
                Some(None)
            })
        },
        Img(title) => {
            builder.add_image(&title);
            Finished(None)
        },
        Block(children) => {
            builder.start_block();
            pending2(children, |builder:&mut BuilderStack<R>, _| {
                builder.end_block();
                Some(None)
            })
        },
        Header(level, children) => {
            // Render the heading text into a sub-renderer narrower by the
            // "#… " prefix width (level + 1 columns), then prepend the
            // prefix when the children are done.
            let min_width = max(builder.width(), 1 + level + 1);
            let sub_builder = builder.new_sub_renderer(min_width - (1 + level));
            builder.push(sub_builder);
            pending2(children, move |builder: &mut BuilderStack<R>, _| {
                let sub_builder = builder.pop();
                let qs: String = "#".repeat(level) + " ";
                builder.start_block();
                builder.append_subrender(sub_builder, repeat(&qs[..]));
                builder.end_block();
                Some(None)
            })
        },
        Div(children) => {
            builder.new_line();
            pending2(children, |builder:&mut BuilderStack<R>, _| {
                builder.new_line();
                Some(None)
            })
        },
        Pre(ref formatted) => {
            builder.add_preformatted_block(formatted);
            Finished(None)
        },
        BlockQuote(children) => {
            // Quoted content is rendered 2 columns narrower, then emitted
            // with a "> " prefix on every line.
            let sub_builder = builder.new_sub_renderer(builder.width()-2);
            builder.push(sub_builder);
            pending2(children, |builder: &mut BuilderStack<R>, _| {
                let sub_builder = builder.pop();
                builder.start_block();
                builder.append_subrender(sub_builder, repeat("> "));
                builder.end_block();
                Some(None)
            })
        },
        Ul(items) => {
            builder.start_block();
            TreeMapResult::PendingChildren{
                children: items,
                cons: Box::new(|_, _| Some(None)),
                // Each list item renders into its own sub-renderer…
                prefn: Some(Box::new(|builder: &mut BuilderStack<R>, _| {
                    let sub_builder = builder.new_sub_renderer(builder.width()-2);
                    builder.push(sub_builder);
                })),
                // …which is then emitted with a "* " bullet prefix.
                postfn: Some(Box::new(|builder: &mut BuilderStack<R>, _| {
                    let sub_builder = builder.pop();
                    builder.append_subrender(sub_builder, once("* ").chain(repeat("  ")));
                })),
            }
        },
        Ol(start, items) => {
            builder.start_block();
            let num_items = items.len();
            // The prefix width could be at either end if the start is negative.
            let min_number = start;
            // Assumption: num_items can't overflow isize.
            let max_number = start + (num_items as i64) - 1;
            let prefix_width_min = format!("{}", min_number).len() + 2;
            let prefix_width_max = format!("{}", max_number).len() + 2;
            let prefix_width = max(prefix_width_min, prefix_width_max);
            // Continuation-line prefix: spaces matching the number width.
            let prefixn = format!("{: <width$}", "", width=prefix_width);
            use std::cell::Cell;
            // Shared mutable counter for the item number, incremented in
            // postfn as each item completes.
            let i: Cell<_> = Cell::new(start);
            TreeMapResult::PendingChildren{
                children: items,
                cons: Box::new(|_, _| Some(None)),
                prefn: Some(Box::new(move |builder: &mut BuilderStack<R>, _| {
                    let sub_builder = builder.new_sub_renderer(builder.width()-prefix_width);
                    builder.push(sub_builder);
                })),
                postfn: Some(Box::new(move |builder: &mut BuilderStack<R>, _| {
                    let sub_builder = builder.pop();
                    let prefix1 = format!("{}.", i.get());
                    let prefix1 = format!("{: <width$}", prefix1, width=prefix_width);
                    builder.append_subrender(sub_builder, once(prefix1.as_str()).chain(repeat(prefixn.as_str())));
                    i.set(i.get() + 1);
                })),
            }
        },
        Break => {
            builder.new_line_hard();
            Finished(None)
        },
        Table(tab) => {
            render_table_tree(builder.deref_mut(), tab, err_out)
        },
        TableRow(row) => {
            render_table_row(builder.deref_mut(), row, err_out)
        },
        TableBody(_) => {
            // TableBody nodes are consumed while building tables and
            // should never reach the renderer.
            unimplemented!("Unexpected TableBody while rendering")
        },
        TableCell(cell) => {
            render_table_cell(builder.deref_mut(), cell, err_out)
        },
        FragStart(fragname) => {
            builder.record_frag_start(&fragname);
            Finished(None)
        },
    }
}
/// Lay out a table: estimate per-column widths from the content, then
/// return a PendingChildren record whose children are the rows annotated
/// with the chosen column widths.
fn render_table_tree<T:Write, R:Renderer>(builder: &mut R, mut table: RenderTable, _err_out: &mut T) -> TreeMapResult<BuilderStack<R>, RenderNode, Option<R>>
{
    /* Now lay out the table. */
    let num_columns = table.num_columns;
    /* Heuristic: scale the column widths according to how much content there is. */
    let mut col_sizes: Vec<SizeEstimate> = vec![Default::default(); num_columns];
    for row in table.rows() {
        let mut colno = 0;
        for cell in row.cells() {
            // FIXME: get_size_estimate is still recursive.
            let mut estimate = cell.get_size_estimate();
            // If the cell has a colspan>1, then spread its size between the
            // columns.
            estimate.size /= cell.colspan;
            estimate.min_width /= cell.colspan;
            for i in 0..cell.colspan {
                col_sizes[colno + i] = (col_sizes[colno + i]).add(estimate);
            }
            colno += cell.colspan;
        }
    }
    let tot_size: usize = col_sizes.iter().map(|est| est.size).sum();
    let width = builder.width();
    // Give each column space proportional to its content size, but never
    // less than its estimated minimum width.
    let mut col_widths:Vec<usize> = col_sizes.iter()
        .map(|sz| {
            if sz.size == 0 {
                0
            } else {
                max(sz.size * width / tot_size, sz.min_width)
            }
        }).collect();
    /* The minimums may have put the total width too high */
    while col_widths.iter().cloned().sum::<usize>() > width {
        // Shrink the column with the most slack above its minimum
        // (ties broken by larger width, then lower column number).
        let (i, _) = col_widths.iter()
            .cloned()
            .enumerate()
            .max_by_key(|&(colno, width)| (width.saturating_sub(col_sizes[colno].min_width), width, usize::max_value() - colno ))
            .unwrap();
        col_widths[i] -= 1;
    }
    if !col_widths.is_empty() {
        // Slight fudge; we're not drawing extreme edges, so one of the columns
        // can get a free character cell from not having a border.
        // Make it the last.
        let last = col_widths.len() - 1;
        col_widths[last] += 1;
    }
    builder.start_block();
    builder.add_horizontal_border();
    TreeMapResult::PendingChildren{
        children: table.into_rows(col_widths),
        cons: Box::new(|_, _| Some(None)),
        prefn: Some(Box::new(|_, _| { })),
        postfn: Some(Box::new(|_, _| { })),
    }
}
/// Render a single table row: each cell is rendered into its own
/// sub-renderer (pushed in `prefn`), and when all cells are done the
/// collected sub-renders are glued together side by side with borders.
fn render_table_row<T:Write, R:Renderer>(_builder: &mut R, row: RenderTableRow, _err_out: &mut T) -> TreeMapResult<BuilderStack<R>, RenderNode, Option<R>>
{
    TreeMapResult::PendingChildren{
        children: row.into_cells(),
        cons: Box::new(|builders, children| {
            // Each child result is Some(sub-renderer), produced by
            // render_table_cell below.
            let children: Vec<_> = children.into_iter().map(Option::unwrap).collect();
            if children.iter().any(|c| !c.empty()) {
                builders.append_columns_with_borders(children, true);
            }
            Some(None)
        }),
        prefn: Some(Box::new(|builder: &mut BuilderStack<R>, node| {
            // Give each cell a sub-renderer of its allocated column width.
            if let RenderNodeInfo::TableCell(ref cell) = node.info {
                let sub_builder = builder.new_sub_renderer(cell.col_width.unwrap());
                builder.push(sub_builder);
            } else {
                // Rows only ever contain TableCell children (see
                // RenderTableRow::into_cells).
                panic!()
            }
        })),
        postfn: Some(Box::new(|_builder: &mut BuilderStack<R>, _| {
        })),
    }
}
/// Render a table cell's contents; the finished sub-renderer (pushed by
/// render_table_row's prefn) is popped and handed back to the row.
fn render_table_cell<T:Write, R:Renderer>(_builder: &mut R, cell: RenderTableCell, _err_out: &mut T) -> TreeMapResult<BuilderStack<R>, RenderNode, Option<R>>
{
    pending2(cell.content, |stack: &mut BuilderStack<R>, _| {
        let finished = stack.pop();
        Some(Some(finished))
    })
}
/// Reads HTML from `input`, decorates it using `decorator`, and
/// returns a `String` with text wrapped to `width` columns.
pub fn from_read_with_decorator<R, D>
        (mut input: R, width: usize, decorator: D) -> String
    where R: io::Read, D: TextDecorator
{
    // Parse the document, discarding any doctype.
    let parse_opts = ParseOpts {
        tree_builder: TreeBuilderOpts {
            drop_doctype: true,
            ..Default::default()
        },
        ..Default::default()
    };
    let dom = parse_document(RcDom::default(), parse_opts)
        .from_utf8()
        .read_from(&mut input)
        .unwrap();
    // Convert DOM -> render tree -> wrapped text.
    let render_tree = dom_to_render_tree(dom.document.clone(), &mut Discard{}).unwrap();
    let renderer = render_tree_to_string(TextRenderer::new(width, decorator),
                                         render_tree, &mut Discard{});
    renderer.into_string()
}
/// Reads HTML from `input`, and returns a `String` with text wrapped to
/// `width` columns.
pub fn from_read<R>(input: R, width: usize) -> String where R: io::Read {
    // Plain-text rendering is just the decorated path with the default
    // plain decorator.
    from_read_with_decorator(input, width, PlainDecorator::new())
}
/// Reads HTML from `input`, and returns text wrapped to `width` columns.
/// The text is returned as a `Vec<TaggedLine<_>>`; the annotations are vectors
/// of `RichAnnotation`.  The "outer" annotation comes first in the `Vec`.
pub fn from_read_rich<R>(mut input: R, width: usize) -> Vec<TaggedLine<Vec<RichAnnotation>>>
    where R: io::Read
{
    // Parse the document, discarding any doctype.
    let parse_opts = ParseOpts {
        tree_builder: TreeBuilderOpts {
            drop_doctype: true,
            ..Default::default()
        },
        ..Default::default()
    };
    let dom = parse_document(RcDom::default(), parse_opts)
        .from_utf8()
        .read_from(&mut input)
        .unwrap();
    // Convert DOM -> render tree -> annotated lines.
    let render_tree = dom_to_render_tree(dom.document.clone(), &mut Discard{}).unwrap();
    let renderer = render_tree_to_string(TextRenderer::new(width, RichDecorator::new()),
                                         render_tree, &mut Discard{});
    renderer
        .into_lines()
        .into_iter()
        .map(RenderLine::into_tagged_line)
        .collect()
}
#[cfg(test)]
mod tests {
use super::{from_read, from_read_with_decorator, TextDecorator};
use super::render::text_renderer::TrivialDecorator;
/// Like assert_eq!(), but prints out the results normally as well
macro_rules! assert_eq_str {
($a:expr, $b:expr) => {
if $a != $b {
println!("<<<\n{}===\n{}>>>", $a, $b);
assert_eq!($a, $b);
}
}
}
fn test_html(input: &[u8], expected: &str, width: usize) {
assert_eq_str!(from_read(input, width), expected);
}
fn test_html_decorator<D>(input: &[u8], expected: &str, width: usize, decorator: D)
where D: TextDecorator
{
let output = from_read_with_decorator(input, width, decorator);
assert_eq_str!(output, expected);
}
#[test]
fn test_table() {
test_html(br##"
<table>
<tr>
<td>1</td>
<td>2</td>
<td>3</td>
</tr>
</table>
"##, r#"───┬───┬────
1 │2 │3
───┴───┴────
"#, 12);
}
#[test]
fn test_thead() {
test_html(br##"
<table>
<thead>
<tr>
<th>Col1</th>
<th>Col2</th>
<th>Col3</th>
</tr>
</thead>
<tbody>
<tr>
<td>1</td>
<td>2</td>
<td>3</td>
</tr>
</tbody>
</table>
"##, r#"────┬────┬─────
Col1│Col2│Col3
────┼────┼─────
1 │2 │3
────┴────┴─────
"#, 15);
}
#[test]
fn test_colspan() {
test_html(br##"
<table>
<tr>
<td>1</td>
<td>2</td>
<td>3</td>
</tr>
<tr>
<td colspan="2">12</td>
<td>3</td>
</tr>
<tr>
<td>1</td>
<td colspan="2">23</td>
</tr>
</table>
"##, r#"───┬───┬────
1 │2 │3
───┴───┼────
12 │3
───┬───┴────
1 │23
───┴────────
"#, 12);
}
#[test]
fn test_para() {
assert_eq_str!(from_read(&b"<p>Hello</p>"[..], 10),
"Hello\n");
}
#[test]
fn test_para2() {
assert_eq_str!(from_read(&b"<p>Hello, world!</p>"[..], 20),
"Hello, world!\n");
}
#[test]
fn test_blockquote() {
assert_eq_str!(from_read(&br#"<p>Hello</p>
<blockquote>One, two, three</blockquote>
<p>foo</p>
"#[..], 12), r#"Hello
> One, two,
> three
foo
"#);
}
#[test]
fn test_ul() {
test_html(br#"
<ul>
<li>Item one</li>
<li>Item two</li>
<li>Item three</li>
</ul>
"#, r#"* Item one
* Item two
* Item
three
"#, 10);
}
#[test]
fn test_ol1() {
test_html(br#"
<ol>
<li>Item one</li>
<li>Item two</li>
<li>Item three</li>
</ol>
"#, r#"1. Item one
2. Item two
3. Item
three
"#, 11);
}
#[test]
fn test_ol2() {
test_html(br#"
<ol>
<li>Item one</li>
<li>Item two</li>
<li>Item three</li>
<li>Item four</li>
<li>Item five</li>
<li>Item six</li>
<li>Item seven</li>
<li>Item eight</li>
<li>Item nine</li>
<li>Item ten</li>
</ol>
"#, r#"1. Item one
2. Item two
3. Item three
4. Item four
5. Item five
6. Item six
7. Item seven
8. Item eight
9. Item nine
10. Item ten
"#, 20);
}
#[test]
fn test_ol_start() {
test_html(br#"
<ol start="3">
<li>Item three</li>
<li>Item four</li>
</ol>
"#, r#"3. Item three
4. Item four
"#, 20);
}
#[test]
fn test_ol_start_9() {
test_html(br#"
<ol start="9">
<li>Item nine</li>
<li>Item ten</li>
</ol>
"#, r#"9. Item nine
10. Item ten
"#, 20);
}
#[test]
fn test_ol_start_neg() {
test_html(br#"
<ol start="-1">
<li>Item minus one</li>
<li>Item zero</li>
<li>Item one</li>
</ol>
"#, r#"-1. Item minus one
0. Item zero
1. Item one
"#, 20);
}
#[test]
fn test_strip_nl() {
test_html(br#"
<p>
One
Two
Three
</p>
"#, "One Two Three\n", 40);
}
#[test]
fn test_strip_nl2() {
test_html(br#"
<p>
One
<span>
Two
</span>
Three
</p>
"#, "One Two Three\n", 40);
}
#[test]
fn test_strip_nl_tbl() {
test_html(br#"
<table>
<tr>
<td>
One
<span>
Two
</span>
Three
</td>
</tr>
</table>
"#, r"────────────────────
One Two Three
────────────────────
", 20);
}
#[test]
fn test_unknown_element() {
test_html(br#"
<foo>
<table>
<tr>
<td>
One
<span><yyy>
Two
</yyy></span>
Three
</td>
</tr>
</table>
</foo>
"#, r"────────────────────
One Two Three
────────────────────
", 20);
}
#[test]
fn test_strip_nl_tbl_p() {
test_html(br#"
<table>
<tr>
<td><p>
One
<span>
Two
</span>
Three
</p></td>
</tr>
</table>
"#, r"────────────────────
One Two Three
────────────────────
", 20);
}
#[test]
fn test_pre() {
test_html(br#"
<pre>foo
bar
wib asdf;
</pre>
<p>Hello</p>
"#, r"foo
bar
wib asdf;
Hello
", 20);
}
#[test]
fn test_link() {
test_html(br#"
<p>Hello, <a href="http://www.example.com/">world</a></p>"#, r"Hello, [world][1]
[1] http://www.example.com/
", 80);
}
#[test]
fn test_link2() {
test_html(br#"
<p>Hello, <a href="http://www.example.com/">world</a>!</p>"#, r"Hello, [world][1]!
[1] http://www.example.com/
", 80);
}
#[test]
fn test_link3() {
test_html(br#"
<p>Hello, <a href="http://www.example.com/">w</a>orld</p>"#, r"Hello, [w][1]orld
[1] http://www.example.com/
", 80);
}
#[test]
fn test_link_wrap() {
test_html(br#"
<a href="http://www.example.com/">Hello</a>"#, r"[Hello][1]
[1] http:/
/www.examp
le.com/
", 10);
}
#[test]
fn test_wrap() {
test_html(br"<p>Hello, world. Superlongwordreally</p>",
r#"Hello,
world.
Superlon
gwordrea
lly
"#, 8);
}
#[test]
fn test_wrap2() {
test_html(br"<p>Hello, world. This is a long sentence with a
few words, which we want to be wrapped correctly.</p>",
r#"Hello, world. This
is a long sentence
with a few words,
which we want to be
wrapped correctly.
"#, 20);
}
#[test]
fn test_wrap3() {
test_html(br#"<p><a href="dest">http://example.org/blah/</a> one two three"#,
r#"[http://example.org/blah/
][1] one two three
[1] dest
"#, 25);
}
#[test]
fn test_div() {
test_html(br"<p>Hello</p><div>Div</div>",
r#"Hello
Div
"#, 20);
test_html(br"<p>Hello</p><div>Div</div><div>Div2</div>",
r#"Hello
Div
Div2
"#, 20);
}
#[test]
fn test_img_alt() {
test_html(br"<p>Hello <img src='foo.jpg' alt='world'></p>",
"Hello [world]\n", 80);
}
#[test]
fn test_br() {
test_html(br"<p>Hello<br/>World</p>",
"Hello\nWorld\n", 20);
}
#[test]
fn test_br2() {
test_html(br"<p>Hello<br/><br/>World</p>",
"Hello\n\nWorld\n", 20);
}
#[test]
fn test_br3() {
test_html(br"<p>Hello<br/> <br/>World</p>",
"Hello\n\nWorld\n", 20);
}
#[test]
fn test_subblock() {
test_html(br#"<div>
<div>Here's a <a href="https://example.com/">link</a>.</div>
<div><ul>
<li>Bullet</li>
<li>Bullet</li>
<li>Bullet</li>
</ul></div>
</div>"#,
r"Here's a [link][1].
* Bullet
* Bullet
* Bullet
[1] https://example.com/
", 80);
}
#[test]
fn test_controlchar() {
test_html("Foo\u{0080}Bar".as_bytes(), "FooBar\n", 80);
test_html("Foo\u{0080}Bar".as_bytes(), "FooB\nar\n", 4);
test_html("FooBa\u{0080}r".as_bytes(), "FooB\nar\n", 4);
}
#[test]
fn test_nested_table_1() {
test_html(br##"
<table>
<tr>
<td>
<table><tr><td>1</td><td>2</td><td>3</td></tr></table>
</td>
<td>
<table><tr><td>4</td><td>5</td><td>6</td></tr></table>
</td>
<td>
<table><tr><td>7</td><td>8</td><td>9</td></tr></table>
</td>
</tr>
<tr>
<td>
<table><tr><td>1</td><td>2</td><td>3</td></tr></table>
</td>
<td>
<table><tr><td>4</td><td>5</td><td>6</td></tr></table>
</td>
<td>
<table><tr><td>7</td><td>8</td><td>9</td></tr></table>
</td>
</tr>
<tr>
<td>
<table><tr><td>1</td><td>2</td><td>3</td></tr></table>
</td>
<td>
<table><tr><td>4</td><td>5</td><td>6</td></tr></table>
</td>
<td>
<table><tr><td>7</td><td>8</td><td>9</td></tr></table>
</td>
</tr>
</table>
"##, r#"─┬─┬──┬─┬─┬──┬─┬─┬───
1│2│3 │4│5│6 │7│8│9
─┼─┼──┼─┼─┼──┼─┼─┼───
1│2│3 │4│5│6 │7│8│9
─┼─┼──┼─┼─┼──┼─┼─┼───
1│2│3 │4│5│6 │7│8│9
─┴─┴──┴─┴─┴──┴─┴─┴───
"#, 21);
}
#[test]
fn test_nested_table_2() {
test_html(br##"
<table>
<tr>
<td>
<table>
<tr><td>1</td><td>a</td></tr>
<tr><td>2</td><td>b</td></tr>
</table>
</td>
<td><pre>one
two
three
four
five
</pre>
</td>
</tr>
</table>
"##, r#"─┬───┬─────
1│a │one
─┼───│two
2│b │three
│ │four
│ │five
─┴───┴─────
"#, 11);
}
#[test]
fn test_h1() {
test_html(br##"
<h1>Hi</h1>
<p>foo</p>
"##, r#"# Hi
foo
"#, 21);
}
#[test]
fn test_h3() {
test_html(br##"
<h3>Hi</h3>
<p>foo</p>
"##, r#"### Hi
foo
"#, 21);
}
// General test that spacing is preserved
#[test]
fn test_pre2() {
test_html(br##"<pre>Hello sp
world</pre>"##, r#"Hello sp
world
"#, 21);
}
// Check that spans work correctly inside <pre>
#[test]
fn test_pre_span() {
test_html(br##"
<pre>Hello <span>$</span>sp
<span>Hi</span> <span>$</span><span>foo</span>
<span>Hi</span> <span>foo</span><span>, </span><span>bar</span>
</pre>"##, r#"Hello $sp
Hi $foo
Hi foo, bar
"#, 21);
}
// Check tab behaviour
#[test]
fn test_pre_tab() {
test_html(b"<pre>\tworld</pre>", " world\n", 40);
test_html(b"<pre>H\tworld</pre>", "H world\n", 40);
test_html(b"<pre>He\tworld</pre>", "He world\n", 40);
test_html(b"<pre>Hel\tworld</pre>", "Hel world\n", 40);
test_html(b"<pre>Hell\tworld</pre>", "Hell world\n", 40);
test_html(b"<pre>Hello\tworld</pre>", "Hello world\n", 40);
test_html(b"<pre>Helloo\tworld</pre>", "Helloo world\n", 40);
test_html(b"<pre>Hellooo\tworld</pre>", "Hellooo world\n", 40);
test_html(b"<pre>Helloooo\tworld</pre>", "Helloooo world\n", 40);
}
#[test]
fn test_em_strong() {
test_html(br##"
<p>Hi <em>em</em> <strong>strong</strong></p>
"##, r#"Hi *em* **strong**
"#, 21);
}
#[test]
#[ignore] // Not yet fixed!
fn test_nbsp_indent() {
test_html(br##"
<div>Top</div>
<div> Indented</div>
<div> Indented again</div>
"##, r#"Top
Indented
Indented again
"#, 21);
}
#[test]
fn test_deeply_nested() {
use ::std::iter::repeat;
let html = repeat("<foo>")
.take(1000)
.collect::<Vec<_>>()
.concat();
test_html(html.as_bytes(), "", 10);
}
#[test]
fn test_deeply_nested_table() {
use ::std::iter::repeat;
let html = repeat("<table><tr><td>hi</td><td>")
.take(1000)
.collect::<Vec<_>>()
.concat()
+ &repeat("</td></tr></table>")
.take(1000)
.collect::<Vec<_>>()
.concat();
test_html(html.as_bytes(), r#"────┬─┬───
hi │h│
│i│
────┴─┴───
"#, 10);
}
#[test]
fn test_table_no_id() {
let html = r#"<html><body><table>
<tr>
<td>hi, world</td>
</tr>
</table></body></html>"#;
test_html(html.as_bytes(), r#"──────────
hi, world
──────────
"#, 10);
}
#[test]
fn test_table_cell_id() {
let html = r#"<html><body><table>
<tr>
<td id="bodyCell">hi, world</td>
</tr>
</table></body></html>"#;
test_html(html.as_bytes(), r#"──────────
hi, world
──────────
"#, 10);
}
#[test]
fn test_table_row_id() {
let html = r#"<html><body><table>
<tr id="bodyrow">
<td>hi, world</td>
</tr>
</table></body></html>"#;
test_html(html.as_bytes(), r#"──────────
hi, world
──────────
"#, 10);
}
#[test]
fn test_table_table_id() {
let html = r#"<html><body><table id="bodytable">
<tr>
<td>hi, world</td>
</tr>
</table></body></html>"#;
test_html(html.as_bytes(), r#"──────────
hi, world
──────────
"#, 10);
}
#[test]
fn test_header_width() {
//0 size
test_html(
br##"
<h2>
<table>
<h3>Anything</h3>
</table>
</h2>
"##,
r#"## ### A
## ### n
## ### y
## ### t
## ### h
## ### i
## ### n
## ### g
##
## ────
"#,
7,
);
//Underflow
test_html(
br##"
<h2>
<table>
<h3>Anything</h3>
</table>
</h2>
"##,
r#"## ### A
## ### n
## ### y
## ### t
## ### h
## ### i
## ### n
## ### g
##
## ──
"#,
5,
);
}
#[test]
fn test_trivial_decorator() {
test_html_decorator(br#"<div>
<div>Here's a <a href="https://example.com/">link</a>.</div>
<div><ul>
<li>Bullet</li>
<li>Bullet</li>
<li>Bullet</li>
</ul></div>
</div>"#,
r"Here's a link.
* Bullet
* Bullet
* Bullet
", 80, TrivialDecorator::new());
}
#[test]
fn test_issue_16()
{
test_html(b"<ul><li><!----></li></ul>", "", 10);
}
}
Work towards making the size estimation non-recursive.
There are still some lifetime issues to resolve at the moment.
//! Convert HTML to text formats.
//!
//! This crate renders HTML into a text format, wrapped to a specified width.
//! This can either be plain text or with extra annotations to (for example)
//! show in a terminal which supports colours.
//!
//! # Examples
//!
//! ```rust
//! # use html2text::from_read;
//! let html = b"
//! <ul>
//! <li>Item one</li>
//! <li>Item two</li>
//! <li>Item three</li>
//! </ul>";
//! assert_eq!(from_read(&html[..], 20),
//! "\
//! * Item one
//! * Item two
//! * Item three
//! ");
//! ```
//! A couple of simple demonstration programs are included as examples:
//!
//! ### html2text
//!
//! The simplest example uses `from_read` to convert HTML on stdin into plain
//! text:
//!
//! ```sh
//! $ cargo run --example html2text < foo.html
//! [...]
//! ```
//!
//! ### html2term
//!
//! A very simple example of using the rich interface (`from_read_rich`) for a
//! slightly interactive console HTML viewer is provided as `html2term`.
//!
//! ```sh
//! $ cargo run --example html2term foo.html
//! [...]
//! ```
//!
//! Note that this example takes the HTML file as a parameter so that it can
//! read keys from stdin.
//!
#![cfg_attr(feature="clippy", feature(plugin))]
#![cfg_attr(feature="clippy", plugin(clippy))]
#![deny(missing_docs)]
extern crate html5ever_atoms;
#[macro_use] extern crate html5ever;
extern crate unicode_width;
#[macro_use]
mod macros;
pub mod render;
use render::Renderer;
use render::text_renderer::{TextRenderer,
TextDecorator,PlainDecorator,RichDecorator,
RichAnnotation,TaggedLine,RenderLine};
use std::io;
use std::io::Write;
use std::cmp::max;
use std::iter::{once,repeat};
use std::ops::{Deref,DerefMut};
use std::cell::Cell;
use html5ever::{parse_document};
use html5ever::driver::ParseOpts;
use html5ever::tree_builder::TreeBuilderOpts;
use html5ever::rcdom::{self,RcDom,Handle,NodeData::{Text,Element,Document,Comment}};
use html5ever::tendril::TendrilSink;
/// A dummy writer which does nothing
struct Discard {}
impl Write for Discard {
    // Report every byte as written without storing anything anywhere.
    fn write(&mut self, bytes: &[u8]) -> std::result::Result<usize, io::Error> {
        let n = bytes.len();
        Ok(n)
    }
    // Nothing is buffered, so flushing trivially succeeds.
    fn flush(&mut self) -> std::result::Result<(), io::Error> {
        Ok(())
    }
}
/// Recursively collect the concatenated text content beneath a DOM node.
fn get_text(handle: Handle) -> String {
    let node = &*handle;
    let mut result = String::new();
    match node.data {
        // A text node contributes its own contents directly.
        Text { contents: ref tstr } => result.push_str(&tstr.borrow()),
        // Anything else contributes the text of its children, in order.
        _ => {
            for child in &*node.children.borrow() {
                result.push_str(&get_text(child.clone()));
            }
        }
    }
    result
}
// Lower bound on renderer width (presumably used when subdividing space
// for nested structures; the uses are outside this chunk — TODO confirm).
const MIN_WIDTH: usize = 5;
/// Size information/estimate
#[derive(Debug,Copy,Clone,Default)]
pub struct SizeEstimate {
    size: usize,       // Rough overall size
    min_width: usize,  // The narrowest possible
}
// The manual `impl Default` (both fields zero) matched the derived one
// exactly, so it has been replaced by `#[derive(Default)]` above
// (clippy::derivable_impls).
impl SizeEstimate {
    /// Combine two estimates into one (add size and widest required)
    pub fn add(self, other: SizeEstimate) -> SizeEstimate {
        SizeEstimate {
            size: self.size + other.size,
            min_width: max(self.min_width, other.min_width),
        }
    }
}
#[derive(Debug)]
/// Render tree table cell
pub struct RenderTableCell {
    colspan: usize,                       // Number of table columns this cell spans
    content: Vec<RenderNode>,             // Render-tree children of the cell
    size_estimate: Option<SizeEstimate>,  // Cached estimate; None until computed
    col_width: Option<usize>,  // Actual width to use
}
impl RenderTableCell {
    /// Render this cell to a builder.
    pub fn render<T:Write, R:Renderer>(&mut self, _builder: &mut R, _err_out: &mut T)
    {
        // Not currently implemented: cells are rendered via the
        // tree-walking renderer instead of through this method.
        unimplemented!()
        //render_tree_children_to_string(builder, &mut self.content, err_out)
    }
    /// Calculate or return the estimate size of the cell
    pub fn get_size_estimate(&mut self) -> SizeEstimate {
        // Lazily combine the children's estimates, then cache the result
        // so repeated calls are cheap.
        if self.size_estimate.is_none() {
            let size = self.content
                .iter_mut()
                .map(|node| node.get_size_estimate())
                .fold(Default::default(), SizeEstimate::add);
            self.size_estimate = Some(size);
        }
        self.size_estimate.unwrap()
    }
}
#[derive(Debug)]
/// Render tree table row
pub struct RenderTableRow {
    cells: Vec<RenderTableCell>,    // The cells making up this row
    col_sizes: Option<Vec<usize>>,  // Column widths; filled in by RenderTable::into_rows
}
impl RenderTableRow {
    /// Return a mutable iterator over the cells.
    pub fn cells(&mut self) -> std::slice::IterMut<RenderTableCell> {
        self.cells.iter_mut()
    }
    /// Count the number of cells in the row.
    /// Takes into account colspan.
    pub fn num_cells(&self) -> usize {
        self.cells.iter().map(|cell| cell.colspan).sum()
    }
    /// Return a Vec of (column, &mut cell) pairs; the column index
    /// takes into account colspan.
    pub fn cell_columns(&mut self) -> Vec<(usize, &mut RenderTableCell)> {
        let mut result = Vec::new();
        let mut colno = 0;
        for cell in &mut self.cells {
            let colspan = cell.colspan;
            result.push((colno, cell));
            colno += colspan;
        }
        result
    }
    /// Return the contained cells as RenderNodes, annotated with their
    /// widths if available.  Skips cells with no width allocated.
    pub fn into_cells(self) -> Vec<RenderNode> {
        let mut result = Vec::new();
        let mut colno = 0;
        // Panics if column sizes haven't been assigned yet
        // (RenderTable::into_rows sets them).
        let col_sizes = self.col_sizes.unwrap();
        for mut cell in self.cells {
            let colspan = cell.colspan;
            // Total width of all columns this cell spans.
            let col_width: usize = col_sizes[colno..colno+cell.colspan].iter().sum();
            // The -1 presumably leaves room for a border column — confirm
            // against append_columns_with_borders; zero/one-wide cells are
            // dropped entirely.
            if col_width > 1 {
                cell.col_width = Some(col_width - 1);
                result.push(RenderNode::new(RenderNodeInfo::TableCell(cell)));
            }
            colno += colspan;
        }
        result
    }
}
#[derive(Debug)]
/// A representation of a table render tree with metadata.
pub struct RenderTable {
    rows: Vec<RenderTableRow>,            // The rows of the table
    num_columns: usize,                   // Table width in (table) columns
    size_estimate: Option<SizeEstimate>,  // Cached estimate; None until computed
}
impl RenderTable {
    /// Create a new RenderTable with the given rows
    pub fn new(rows: Vec<RenderTableRow>) -> RenderTable {
        // The table is as wide (in columns) as its widest row.
        let num_columns = rows.iter()
            .map(|r| r.num_cells()).max().unwrap_or(0);
        RenderTable {
            rows: rows,
            num_columns: num_columns,
            size_estimate: None,
        }
    }
    /// Return an iterator over the rows.
    pub fn rows(&mut self) -> std::slice::IterMut<RenderTableRow> {
        self.rows.iter_mut()
    }
    /// Consume this and return a Vec<RenderNode> containing the children;
    /// the children know the column sizes required.
    pub fn into_rows(self, col_sizes: Vec<usize>) -> Vec<RenderNode> {
        self.rows
            .into_iter()
            .map(|mut tr| {
                tr.col_sizes = Some(col_sizes.clone());
                RenderNode::new(RenderNodeInfo::TableRow(tr))
            })
            .collect()
    }
    // Compute and cache a size estimate for the whole table by
    // accumulating per-column estimates from the cells.
    fn calc_size_estimate(&mut self) {
        if self.num_columns == 0 {
            self.size_estimate = Some(SizeEstimate { size: 0, min_width: 0 });
            return;
        }
        let mut sizes: Vec<SizeEstimate> = vec![Default::default(); self.num_columns];
        // For now, a simple estimate based on adding up sub-parts.
        for row in self.rows() {
            let mut colno = 0usize;
            for cell in row.cells() {
                let cellsize = cell.get_size_estimate();
                for colnum in 0..cell.colspan {
                    // Spread a spanning cell's size evenly over its columns.
                    sizes[colno + colnum].size += cellsize.size / cell.colspan;
                    // NOTE(review): dividing the *accumulated* min_width by
                    // colspan looks odd — dividing cellsize.min_width instead
                    // may have been intended (cf. render_table_tree); confirm
                    // before changing.
                    sizes[colno + colnum].min_width = max(sizes[colno+colnum].min_width/cell.colspan, cellsize.min_width);
                }
                colno += cell.colspan;
            }
        }
        let size = sizes.iter().map(|s| s.size).sum();  // Include borders?
        // Between num_columns columns there are num_columns-1 border cells.
        let min_width = sizes.iter().map(|s| s.min_width).sum::<usize>() + self.num_columns-1;
        self.size_estimate = Some(SizeEstimate { size: size, min_width: min_width });
    }
    /// Calculate and store (or return stored value) of estimated size
    pub fn get_size_estimate(&mut self) -> SizeEstimate {
        if self.size_estimate.is_none() {
            self.calc_size_estimate();
        }
        self.size_estimate.unwrap()
    }
}
/// The node-specific information distilled from the DOM.
#[derive(Debug)]
pub enum RenderNodeInfo {
    /// Some text.
    Text(String),
    /// A group of nodes collected together.
    Container(Vec<RenderNode>),
    /// A link with contained nodes
    Link(String, Vec<RenderNode>),
    /// An emphasised region
    Em(Vec<RenderNode>),
    /// A strong region
    Strong(Vec<RenderNode>),
    /// A code region
    Code(Vec<RenderNode>),
    /// An image (title)
    Img(String),
    /// A block element with children
    Block(Vec<RenderNode>),
    /// A header (h1, h2, ...) with children
    Header(usize, Vec<RenderNode>),
    /// A Div element with children
    Div(Vec<RenderNode>),
    /// A preformatted region.
    Pre(String),
    /// A blockquote
    BlockQuote(Vec<RenderNode>),
    /// An unordered list
    Ul(Vec<RenderNode>),
    /// An ordered list: the starting number (from the `start` attribute)
    /// and the items.
    Ol(i64, Vec<RenderNode>),
    /// A line break
    Break,
    /// A table
    Table(RenderTable),
    /// A set of table rows (from either <thead> or <tbody>)
    TableBody(Vec<RenderTableRow>),
    /// Table row (must only appear within a table body)
    TableRow(RenderTableRow),
    /// Table cell (must only appear within a table row)
    TableCell(RenderTableCell),
    /// Start of a named HTML fragment
    FragStart(String),
}
/// Common fields from a node.
#[derive(Debug)]
pub struct RenderNode {
    /// Cached size estimate; filled in lazily by get_size_estimate.
    size_estimate: Cell<Option<SizeEstimate>>,
    /// The node-specific information distilled from the DOM.
    info: RenderNodeInfo,
}
impl RenderNode {
    /// Create a node from the RenderNodeInfo.
    pub fn new(info: RenderNodeInfo) -> RenderNode {
        RenderNode {
            size_estimate: Cell::new(None),
            info: info,
        }
    }
    /// Get a size estimate (~characters)
    pub fn get_size_estimate(&self) -> SizeEstimate {
        // If it's already calculated, then just return the answer.
        if let Some(s) = self.size_estimate.get() {
            return s;
        };
        use RenderNodeInfo::*;
        // Otherwise, make an estimate.
        //
        // NOTE(review): the `ref mut` bindings below — and the call to
        // `RenderTable::get_size_estimate`, which takes `&mut self` — go
        // through this method's shared `&self` receiver.  That looks like
        // it cannot compile as written; confirm whether the bindings
        // should be plain `ref` with interior mutability used for the
        // table's cache, or the receiver should be `&mut self`.
        let estimate = match self.info {
            // Leaf-ish text nodes: the trimmed length, with a standard
            // minimum width if there is any text at all.
            Text(ref t) |
            Img(ref t) |
            Pre(ref t) => {
                let len = t.trim().len();
                SizeEstimate {
                    size: len,
                    min_width: if len > 0 { MIN_WIDTH } else { 0 },
                }
            },
            // Plain containers: sum the children's estimates.
            Container(ref mut v) |
            Link(_, ref mut v) |
            Em(ref mut v) |
            Strong(ref mut v) |
            Code(ref mut v) |
            Block(ref mut v) |
            Div(ref mut v) |
            BlockQuote(ref mut v) |
            Ul(ref mut v) |
            Ol(_, ref mut v) => {
                v.iter()
                    .map(RenderNode::get_size_estimate)
                    .fold(Default::default(), SizeEstimate::add)
            },
            // Headers also need room for the "## " style prefix.
            Header(level, ref mut v) => {
                v.iter()
                    .map(RenderNode::get_size_estimate)
                    .fold(Default::default(), SizeEstimate::add).add(SizeEstimate {size:0, min_width: MIN_WIDTH+level+2})
            },
            Break => SizeEstimate { size: 1, min_width: 1 },
            // Tables have their own column-based estimation.
            Table(ref mut t) => {
                t.get_size_estimate()
            },
            // These only ever occur inside a Table node, which handles
            // them itself.
            TableRow(_)|TableBody(_)|TableCell(_) => {
                unimplemented!()
            },
            FragStart(_) => Default::default(),
        };
        // Cache the answer for next time.
        self.size_estimate.set(Some(estimate));
        estimate
    }
}
/// Pre-populate the size-estimate caches bottom-up (driven by
/// tree_map_reduce) so that later calls to get_size_estimate don't
/// recurse deeply.
fn precalc_size_estimate<'a>(node: &'a RenderNode) -> TreeMapResult<(), &'a RenderNode, ()> {
    use RenderNodeInfo::*;
    // Already cached — nothing to do for this subtree root.
    if node.size_estimate.get().is_some() {
        return TreeMapResult::Nothing;
    }
    match node.info {
        // Leaves: estimate directly, no children to wait for.
        Text(_) |
        Img(_) |
        Pre(_) |
        Break |
        FragStart(_) => {
            let _ = node.get_size_estimate();
            TreeMapResult::Nothing
        },
        Container(ref v) |
        Link(_, ref v) |
        Em(ref v) |
        Strong(ref v) |
        Code(ref v) |
        Block(ref v) |
        Div(ref v) |
        BlockQuote(ref v) |
        Ul(ref v) |
        Ol(_, ref v) |
        Header(_, ref v) => {
            TreeMapResult::PendingChildren {
                children: v.iter().collect(),
                // Once the children are cached, cache this node too.
                cons: Box::new(move |_, _cs| {
                    node.get_size_estimate();
                    None
                }),
                prefn: None,
                postfn: None,
            }
        },
        Table(ref t) => {
            /* Return all the indirect children which are RenderNodes. */
            // Bug fix: iterate by reference — `t` is only a shared
            // borrow, so the rows/cells must not be moved out of it.
            let mut children = Vec::new();
            for row in &t.rows {
                for cell in &row.cells {
                    children.extend(cell.content.iter());
                }
            }
            TreeMapResult::PendingChildren {
                children,
                cons: Box::new(move |_, _cs| {
                    node.get_size_estimate();
                    None
                }),
                prefn: None,
                postfn: None,
            }
        },
        TableRow(_)|TableBody(_)|TableCell(_) => {
            unimplemented!()
        },
    }
}
/// Make a Vec of RenderNodes from the children of a node.
fn children_to_render_nodes<T: Write>(handle: Handle, err_out: &mut T) -> Vec<RenderNode> {
    // Process each child in turn; children which produce no render node
    // are simply dropped.
    let mut rendered = Vec::new();
    for child in handle.children.borrow().iter() {
        if let Some(node) = dom_to_render_tree(child.clone(), err_out) {
            rendered.push(node);
        }
    }
    rendered
}
/// Make a Vec of RenderNodes from the <li> children of a node.
/// Non-<li> elements and comments are ignored.
fn list_children_to_render_nodes<T: Write>(handle: Handle, err_out: &mut T) -> Vec<RenderNode> {
    let mut items = Vec::new();
    for child in handle.children.borrow().iter() {
        match child.data {
            Element { ref name, .. } => {
                // Only list items contribute; each becomes a Block.
                if let expanded_name!(html "li") = name.expanded() {
                    let contents = children_to_render_nodes(child.clone(), err_out);
                    items.push(RenderNode::new(RenderNodeInfo::Block(contents)));
                }
            },
            Comment { .. } => {},
            _ => { html_trace!("Unhandled in list: {:?}\n", child); },
        }
    }
    items
}
/// Convert a table into a RenderNode
fn table_to_render_tree<T: Write>(handle: Handle, _err_out: &mut T) -> TreeMapResult<(), Handle, RenderNode> {
    pending(handle, |_, body_children| {
        // Gather the rows out of each processed <thead>/<tbody>;
        // anything else directly inside the table is logged and dropped.
        let mut all_rows = Vec::new();
        for child in body_children {
            if let RenderNodeInfo::TableBody(body) = child.info {
                all_rows.extend(body);
            } else {
                html_trace!("Found in table: {:?}", child.info);
            }
        }
        let table = RenderTable::new(all_rows);
        Some(RenderNode::new(RenderNodeInfo::Table(table)))
    })
}
/// Add rows from a thead or tbody.
fn tbody_to_render_tree<T: Write>(handle: Handle, _err_out: &mut T) -> TreeMapResult<(), Handle, RenderNode> {
    pending(handle, |_, rowchildren| {
        // Keep only children which turned into table rows; log the rest.
        let mut rows = Vec::new();
        for rownode in rowchildren {
            if let RenderNodeInfo::TableRow(row) = rownode.info {
                rows.push(row);
            } else {
                html_trace!(" [[tbody child: {:?}]]", rownode);
            }
        }
        Some(RenderNode::new(RenderNodeInfo::TableBody(rows)))
    })
}
/// Convert a table row to a RenderTableRow
fn tr_to_render_tree<T: Write>(handle: Handle, _err_out: &mut T) -> TreeMapResult<(), Handle, RenderNode> {
    pending(handle, |_, cellnodes| {
        // Keep only children which turned into table cells; log the rest.
        let mut cells = Vec::new();
        for cellnode in cellnodes {
            if let RenderNodeInfo::TableCell(cell) = cellnode.info {
                cells.push(cell);
            } else {
                html_trace!(" [[tr child: {:?}]]", cellnode);
            }
        }
        // Column sizes are filled in later by RenderTable::into_rows.
        let row = RenderTableRow { cells, col_sizes: None };
        Some(RenderNode::new(RenderNodeInfo::TableRow(row)))
    })
}
/// Convert a single table cell to a render node.
fn td_to_render_tree<T: Write>(handle: Handle, _err_out: &mut T) -> TreeMapResult<(), Handle, RenderNode> {
    // Default span is one column.  A colspan attribute which is missing,
    // unparseable, or zero falls back to 1 — a zero colspan would later
    // cause a divide-by-zero when the cell's size estimate is spread
    // over the columns it spans.
    let mut colspan = 1;
    if let Element { ref attrs, .. } = handle.data {
        for attr in attrs.borrow().iter() {
            if &attr.name.local == "colspan" {
                let v: &str = &*attr.value;
                colspan = match v.parse() {
                    Ok(n) if n > 0 => n,
                    _ => 1,
                };
            }
        }
    }
    pending(handle, move |_, children| {
        Some(RenderNode::new(RenderNodeInfo::TableCell(RenderTableCell {
            colspan,
            content: children,
            size_estimate: None,
            col_width: None,
        })))
    })
}
/// A reducer which combines results from mapping children into
/// the result for the current node.  Takes a context and a
/// vector of results and returns a new result (or nothing).
type ResultReducer<'a, C, R> = dyn Fn(&mut C, Vec<R>) -> Option<R>+'a;
/// A closure to call before processing a child node.
type ChildPreFn<C, N> = dyn Fn(&mut C, &N);
/// A closure to call after processing a child node,
/// before adding the result to the processed results
/// vector.
type ChildPostFn<C, R> = dyn Fn(&mut C, &R);
/// The result of trying to render one node.
enum TreeMapResult<'a, C, N, R> {
    /// A completed result.
    Finished(R),
    /// Deferred completion - can be turned into a result
    /// once the vector of children are processed.
    PendingChildren {
        /// The child nodes still to be processed.
        children: Vec<N>,
        /// Reducer run on the children's results to build this node's result.
        cons: Box<ResultReducer<'a, C, R>>,
        /// Optional hook run before each child is processed.
        prefn: Option<Box<ChildPreFn<C, N>>>,
        /// Optional hook run after each child's result is produced.
        postfn: Option<Box<ChildPostFn<C, R>>>,
    },
    /// Nothing (e.g. a comment or other ignored element).
    Nothing
}
/// Generic depth-first tree walk, implemented iteratively with an
/// explicit stack (so deep documents cannot overflow the call stack).
/// `process_node` is called on each node and may finish a result
/// immediately, defer until the node's children are processed, or
/// produce nothing.  Returns the result for `top`, if any.
fn tree_map_reduce<C, N, R, M>(context: &mut C,
                               top: N,
                               mut process_node: M) -> Option<R>
    where M: FnMut(&mut C, N) -> TreeMapResult<C, N, R>,
{
    /// A node partially decoded, waiting for its children to
    /// be processed.
    struct PendingNode<'a, C, R, N> {
        /// How to make the node once finished
        construct: Box<ResultReducer<'a, C, R>>,
        /// Called before processing each child
        prefn: Option<Box<ChildPreFn<C, N>>>,
        /// Called after processing each child
        postfn: Option<Box<ChildPostFn<C, R>>>,
        /// Children already processed
        children: Vec<R>,
        /// Iterator of child nodes not yet processed
        to_process: std::vec::IntoIter<N>,
    }
    // Sentinel frame: its single "child" is the top node, and its
    // reducer just hands back that child's result.
    let mut pending_stack = vec![
        PendingNode {
            // We only expect one child, which we'll just return.
            construct: Box::new(|_, mut cs| cs.pop()),
            prefn: None,
            postfn: None,
            children: Vec::new(),
            to_process: vec![top].into_iter(),
        }
    ];
    loop {
        // Get the next child node to process
        let next_node = pending_stack.last_mut()
            .unwrap()
            .to_process
            .next();
        if let Some(h) = next_node {
            pending_stack.last_mut().unwrap().prefn.as_ref().map(|ref f| f(context, &h));
            match process_node(context, h) {
                TreeMapResult::Finished(result) => {
                    pending_stack.last_mut().unwrap().postfn.as_ref().map(|ref f| f(context, &result));
                    pending_stack.last_mut().unwrap().children.push(result);
                }
                TreeMapResult::PendingChildren { children, cons, prefn, postfn } => {
                    // Descend: this node's children are processed before
                    // its reducer runs.
                    pending_stack.push(PendingNode {
                        construct: cons,
                        prefn,
                        postfn,
                        children: Vec::new(),
                        to_process: children.into_iter(),
                    });
                },
                TreeMapResult::Nothing => {},
            };
        } else {
            // No more children, so finally construct the parent.
            let completed = pending_stack.pop().unwrap();
            let reduced = (completed.construct)(context, completed.children);
            if let Some(node) = reduced {
                if let Some(parent) = pending_stack.last_mut() {
                    parent.postfn.as_ref().map(|ref f| f(context, &node));
                    parent.children.push(node);
                } else {
                    // Finished the whole stack!
                    break Some(node);
                }
            } else {
                /* Finished the stack, and have nothing */
                if pending_stack.is_empty() {
                    break None;
                }
            }
        }
    }
}
/// Convert a DOM tree or subtree into a render tree.
pub fn dom_to_render_tree<T: Write>(handle: Handle, err_out: &mut T) -> Option<RenderNode> {
    html_trace!("### dom_to_render_tree: HTML: {:?}", handle);
    let render_tree =
        tree_map_reduce(&mut (), handle, |_, handle| process_dom_node(handle, err_out));
    html_trace!("### dom_to_render_tree: out= {:#?}", render_tree);
    render_tree
}
/// Helper: defer the node until all of `handle`'s DOM children have been
/// processed, then build it with `f`.
fn pending<'a, F>(handle: Handle, f: F) -> TreeMapResult<'a, (), Handle, RenderNode>
    where for<'r> F: Fn(&'r mut (), std::vec::Vec<RenderNode>) -> Option<RenderNode>+'static
{
    let children = handle.children.borrow().clone();
    TreeMapResult::PendingChildren {
        children,
        cons: Box::new(f),
        prefn: None,
        postfn: None,
    }
}
/// Prepend a FragmentStart (or analogous) marker to an existing
/// RenderNode.
///
/// `prefix` is the marker node; `orig` is the node it should precede.
/// Where possible the marker is pushed *inside* `orig` rather than
/// wrapped around it, so the renderer emits it after any paragraph
/// break that `orig` produces.
fn prepend_marker(prefix: RenderNode, mut orig: RenderNode) -> RenderNode {
    use RenderNodeInfo::*;
    match orig.info {
        // For block elements such as Block and Div, we need to insert
        // the node at the front of their children array, otherwise
        // the renderer is liable to drop the fragment start marker
        // _before_ the new line indicating the end of the previous
        // paragraph.
        //
        // For Container, we do the same thing just to make the data
        // less pointlessly nested.
        Block(ref mut children) |
        Div(ref mut children) |
        BlockQuote(ref mut children) |
        Container(ref mut children) |
        TableCell(RenderTableCell { content: ref mut children, .. }) => {
            children.insert(0, prefix);
            // Now return orig, but we do that outside the match so
            // that we've given back the borrowed ref 'children'.
        },
        // For table rows and tables, push down if there's any content.
        TableRow(ref mut rrow) => {
            // If the row is empty, then there isn't really anything
            // to attach the fragment start to.
            if rrow.cells.len() > 0 {
                rrow.cells[0].content.insert(0, prefix);
            }
        },
        Table(ref mut rtable) => {
            // If the row is empty, then there isn't really anything
            // to attach the fragment start to.
            if rtable.rows.len() > 0 {
                let rrow = &mut rtable.rows[0];
                if rrow.cells.len() > 0 {
                    rrow.cells[0].content.insert(0, prefix);
                }
            }
        },
        // For anything else, just make a new Container with the
        // prefix node and the original one.
        _ => {
            return RenderNode::new(Container(vec![prefix, orig]));
        },
    }
    orig
}
/// Convert one DOM node into a TreeMapResult: either a finished
/// RenderNode, a deferred node (built once its children are processed),
/// or Nothing for ignored content.  Elements with an `id` (or, for <a>,
/// a `name`) attribute also get a FragStart marker attached.
fn process_dom_node<T:Write>(handle: Handle, err_out: &mut T) -> TreeMapResult<(), Handle, RenderNode> {
    use TreeMapResult::*;
    use RenderNodeInfo::*;
    match handle.clone().data {
        Document => pending(handle, |&mut (), cs| Some(RenderNode::new(Container(cs)))),
        Comment { .. } => Nothing,
        Element { ref name, ref attrs, .. } => {
            let mut frag_from_name_attr = false;
            let result = match name.expanded() {
                expanded_name!(html "html") |
                expanded_name!(html "span") |
                expanded_name!(html "body") => {
                    /* process children, but don't add anything */
                    pending(handle, |_,cs| Some(RenderNode::new(Container(cs))))
                },
                expanded_name!(html "link") |
                expanded_name!(html "meta") |
                expanded_name!(html "hr") |
                expanded_name!(html "script") |
                expanded_name!(html "style") |
                expanded_name!(html "head") => {
                    /* Ignore the head and its children */
                    Nothing
                },
                expanded_name!(html "a") => {
                    // Use the first href attribute, if any, as the link target.
                    let borrowed = attrs.borrow();
                    let mut target = None;
                    frag_from_name_attr = true;
                    for attr in borrowed.iter() {
                        if &attr.name.local == "href" {
                            target = Some(&*attr.value);
                            break;
                        }
                    }
                    PendingChildren{
                        children: handle.children.borrow().clone(),
                        cons: if let Some(href) = target {
                            // We need the closure to own the string it's going to use.
                            // Unfortunately that means we ideally want FnOnce; but
                            // that doesn't yet work in a Box.  Box<FnBox()> does, but
                            // is unstable.  So we'll just move a string in and clone
                            // it on use.
                            let href: String = href.into();
                            Box::new(move |_, cs| Some(RenderNode::new(Link(href.clone(), cs))))
                        } else {
                            // No href: render as a plain container.
                            Box::new(|_, cs| Some(RenderNode::new(Container(cs))))
                        },
                        prefn: None, postfn: None,
                    }
                },
                expanded_name!(html "em") => {
                    pending(handle, |_, cs| Some(RenderNode::new(Em(cs))))
                },
                expanded_name!(html "strong") => {
                    pending(handle, |_, cs| Some(RenderNode::new(Strong(cs))))
                },
                expanded_name!(html "code") => {
                    pending(handle, |_, cs| Some(RenderNode::new(Code(cs))))
                },
                expanded_name!(html "img") => {
                    // Images render as their alt text, or nothing at all.
                    let borrowed = attrs.borrow();
                    let mut title = None;
                    for attr in borrowed.iter() {
                        if &attr.name.local == "alt" {
                            title = Some(&*attr.value);
                            break;
                        }
                    }
                    if let Some(title) = title {
                        Finished(RenderNode::new(Img(title.into())))
                    } else {
                        Nothing
                    }
                },
                expanded_name!(html "h1") |
                expanded_name!(html "h2") |
                expanded_name!(html "h3") |
                expanded_name!(html "h4") => {
                    // The header level is the digit in the tag name.
                    let level: usize = name.local[1..].parse().unwrap();
                    pending(handle, move |_, cs| Some(RenderNode::new(Header(level, cs))))
                },
                expanded_name!(html "p") => {
                    pending(handle, |_, cs| Some(RenderNode::new(Block(cs))))
                },
                expanded_name!(html "div") => {
                    pending(handle, |_, cs| Some(RenderNode::new(Div(cs))))
                },
                expanded_name!(html "pre") => {
                    // Preformatted text keeps its raw text content.
                    Finished(RenderNode::new(Pre(get_text(handle))))
                },
                expanded_name!(html "br") => {
                    Finished(RenderNode::new(Break))
                }
                expanded_name!(html "table") => {
                    table_to_render_tree(handle.clone(), err_out)
                },
                expanded_name!(html "thead") |
                expanded_name!(html "tbody") => {
                    tbody_to_render_tree(handle.clone(), err_out)
                },
                expanded_name!(html "tr") => {
                    tr_to_render_tree(handle.clone(), err_out)
                },
                expanded_name!(html "th") |
                expanded_name!(html "td") => {
                    td_to_render_tree(handle.clone(), err_out)
                }
                expanded_name!(html "blockquote") => {
                    pending(handle, |_, cs| Some(RenderNode::new(BlockQuote(cs))))
                },
                expanded_name!(html "ul") => {
                    Finished(RenderNode::new(Ul(list_children_to_render_nodes(handle.clone(), err_out))))
                },
                expanded_name!(html "ol") => {
                    // Honour the start attribute if present and parseable.
                    let borrowed = attrs.borrow();
                    let mut start = 1;
                    for attr in borrowed.iter() {
                        if &attr.name.local == "start" {
                            start = attr.value.parse().ok().unwrap_or(1);
                            break;
                        }
                    }
                    Finished(RenderNode::new(Ol(start, list_children_to_render_nodes(handle.clone(), err_out))))
                },
                _ => {
                    // Unknown elements are treated as transparent containers.
                    html_trace!("Unhandled element: {:?}\n", name.local);
                    pending(handle, |_, cs| Some(RenderNode::new(Container(cs))))
                    //None
                },
            };
            // If the element names a fragment (id attribute, or name on
            // an <a>), attach a FragStart marker to whatever we built.
            let mut fragment = None;
            let borrowed = attrs.borrow();
            for attr in borrowed.iter() {
                if &attr.name.local == "id" ||
                    (frag_from_name_attr && &attr.name.local == "name")
                {
                    fragment = Some(attr.value.to_string());
                    break;
                }
            }
            if let Some(fragname) = fragment {
                match result {
                    Finished(node) => Finished(prepend_marker(RenderNode::new(FragStart(fragname)), node)),
                    Nothing => Finished(RenderNode::new(FragStart(fragname))),
                    PendingChildren{children, cons, prefn, postfn} => {
                        // Wrap the reducer so the marker is prepended once
                        // the node is eventually constructed.
                        let fragname: String = fragname.into();
                        PendingChildren {
                            children: children,
                            prefn: prefn,
                            postfn: postfn,
                            cons: Box::new(move |ctx,ch| {
                                let fragnode = RenderNode::new(FragStart(fragname.clone()));
                                match cons(ctx,ch) {
                                    None => Some(fragnode),
                                    Some(node) => Some(prepend_marker(fragnode, node)),
                                }
                            }),
                        }
                    },
                }
            } else {
                result
            }
        },
        rcdom::NodeData::Text { contents: ref tstr } => {
            Finished(RenderNode::new(Text((&*tstr.borrow()).into())))
        }
        _ => {
            // NodeData doesn't have a Debug impl.
            write!(err_out, "Unhandled node type.\n").unwrap();
            Nothing
        },
    }
}
/// Context to use during tree parsing.
/// This mainly gives access to a Renderer, but needs to be able to push
/// new ones on for nested structures.
struct BuilderStack<R:Renderer> {
    /// The stack of renderers; the last element is the current target
    /// (exposed via the Deref/DerefMut impls below).
    builders: Vec<R>,
}
impl<R:Renderer> BuilderStack<R> {
    /// Create a stack holding just the given builder.
    pub fn new(builder: R) -> BuilderStack<R> {
        BuilderStack { builders: vec![builder] }
    }
    /// Push a new builder onto the stack; it becomes the current target.
    pub fn push(&mut self, builder: R) {
        self.builders.push(builder);
    }
    /// Pop off the top builder and return it.
    /// Panics if empty.
    pub fn pop(&mut self) -> R {
        self.builders.pop().unwrap()
    }
    /// Consume the stack, returning the sole remaining builder.
    /// Panics unless exactly one builder is left.
    pub fn into_inner(mut self) -> R {
        assert_eq!(self.builders.len(), 1);
        self.builders.pop().unwrap()
    }
}
// Deref to the current (top-of-stack) renderer.
impl<R:Renderer> Deref for BuilderStack<R> {
    type Target = R;
    fn deref(&self) -> &R {
        self.builders.last().expect("Underflow in BuilderStack")
    }
}
// DerefMut to the current (top-of-stack) renderer.
impl<R:Renderer> DerefMut for BuilderStack<R> {
    fn deref_mut(&mut self) -> &mut R {
        self.builders.last_mut().expect("Underflow in BuilderStack")
    }
}
/// Render a render tree into the given builder and return it (for
/// actually printing the result).
fn render_tree_to_string<T: Write, R: Renderer>(builder: R, tree: RenderNode,
                                                err_out: &mut T) -> R {
    /* Phase 1: get size estimates. */
    // Bug fix: pass the tree by reference here — it was previously moved
    // into the phase-1 walk and then used again in phase 2 below.
    tree_map_reduce(&mut (), &tree, |_, node| precalc_size_estimate(node));
    /* Phase 2: actually render. */
    let mut bs = BuilderStack::new(builder);
    tree_map_reduce(&mut bs, tree,
                    |builders, node| do_render_node(builders, node, err_out));
    bs.into_inner()
}
/// Helper: defer the current render node until `children` have been
/// rendered, then finish it with `f`.
fn pending2<'a, R: Renderer, F: Fn(&mut BuilderStack<R>, Vec<Option<R>>) -> Option<Option<R>> + 'static>(children: Vec<RenderNode>, f: F) -> TreeMapResult<'a, BuilderStack<R>, RenderNode, Option<R>> {
    TreeMapResult::PendingChildren {
        children,
        cons: Box::new(f),
        prefn: None,
        postfn: None,
    }
}
/// Render a single RenderNode into the current builder on the stack.
/// Nodes with children defer completion until the children have been
/// rendered; container-like nodes push sub-renderers and stitch the
/// results back in afterwards.
fn do_render_node<'a, 'b, T: Write, R: Renderer>(builder: &mut BuilderStack<R>,
                                                 tree: RenderNode,
                                                 err_out: &'b mut T)
                                                 -> TreeMapResult<'static, BuilderStack<R>, RenderNode, Option<R>>
{
    use TreeMapResult::*;
    use RenderNodeInfo::*;
    match tree.info {
        Text(ref tstr) => {
            builder.add_inline_text(tstr);
            Finished(None)
        },
        Container(children) => {
            pending2(children, |_, _| Some(None))
        },
        Link(href, children) => {
            builder.start_link(&href);
            pending2(children, |builder:&mut BuilderStack<R>, _| {
                builder.end_link();
                Some(None)
            })
        },
        Em(children) => {
            builder.start_emphasis();
            pending2(children, |builder:&mut BuilderStack<R>, _| {
                builder.end_emphasis();
                Some(None)
            })
        },
        Strong(children) => {
            builder.start_strong();
            pending2(children, |builder:&mut BuilderStack<R>, _| {
                builder.end_strong();
                Some(None)
            })
        },
        Code(children) => {
            builder.start_code();
            pending2(children, |builder:&mut BuilderStack<R>, _| {
                builder.end_code();
                Some(None)
            })
        },
        Img(title) => {
            builder.add_image(&title);
            Finished(None)
        },
        Block(children) => {
            builder.start_block();
            pending2(children, |builder:&mut BuilderStack<R>, _| {
                builder.end_block();
                Some(None)
            })
        },
        Header(level, children) => {
            // Render the contents into a narrower sub-builder, then
            // append prefixed with "#"s (one per level).
            let min_width = max(builder.width(), 1 + level + 1);
            let sub_builder = builder.new_sub_renderer(min_width - (1 + level));
            builder.push(sub_builder);
            pending2(children, move |builder: &mut BuilderStack<R>, _| {
                let sub_builder = builder.pop();
                let qs: String = "#".repeat(level) + " ";
                builder.start_block();
                builder.append_subrender(sub_builder, repeat(&qs[..]));
                builder.end_block();
                Some(None)
            })
        },
        Div(children) => {
            builder.new_line();
            pending2(children, |builder:&mut BuilderStack<R>, _| {
                builder.new_line();
                Some(None)
            })
        },
        Pre(ref formatted) => {
            builder.add_preformatted_block(formatted);
            Finished(None)
        },
        BlockQuote(children) => {
            // Quoted content is 2 columns narrower and prefixed "> ".
            let sub_builder = builder.new_sub_renderer(builder.width()-2);
            builder.push(sub_builder);
            pending2(children, |builder: &mut BuilderStack<R>, _| {
                let sub_builder = builder.pop();
                builder.start_block();
                builder.append_subrender(sub_builder, repeat("> "));
                builder.end_block();
                Some(None)
            })
        },
        Ul(items) => {
            builder.start_block();
            TreeMapResult::PendingChildren{
                children: items,
                cons: Box::new(|_, _| Some(None)),
                // Each item renders 2 columns narrower...
                prefn: Some(Box::new(|builder: &mut BuilderStack<R>, _| {
                    let sub_builder = builder.new_sub_renderer(builder.width()-2);
                    builder.push(sub_builder);
                })),
                // ...then is appended with "* " on the first line and
                // spaces on continuation lines.
                postfn: Some(Box::new(|builder: &mut BuilderStack<R>, _| {
                    let sub_builder = builder.pop();
                    builder.append_subrender(sub_builder, once("* ").chain(repeat("  ")));
                })),
            }
        },
        Ol(start, items) => {
            builder.start_block();
            let num_items = items.len();
            // The prefix width could be at either end if the start is negative.
            let min_number = start;
            // Assumption: num_items can't overflow isize.
            let max_number = start + (num_items as i64) - 1;
            let prefix_width_min = format!("{}", min_number).len() + 2;
            let prefix_width_max = format!("{}", max_number).len() + 2;
            let prefix_width = max(prefix_width_min, prefix_width_max);
            let prefixn = format!("{: <width$}", "", width=prefix_width);
            // Current item number, shared between the per-item closures.
            let i: Cell<_> = Cell::new(start);
            TreeMapResult::PendingChildren{
                children: items,
                cons: Box::new(|_, _| Some(None)),
                prefn: Some(Box::new(move |builder: &mut BuilderStack<R>, _| {
                    let sub_builder = builder.new_sub_renderer(builder.width()-prefix_width);
                    builder.push(sub_builder);
                })),
                postfn: Some(Box::new(move |builder: &mut BuilderStack<R>, _| {
                    let sub_builder = builder.pop();
                    let prefix1 = format!("{}.", i.get());
                    let prefix1 = format!("{: <width$}", prefix1, width=prefix_width);
                    builder.append_subrender(sub_builder, once(prefix1.as_str()).chain(repeat(prefixn.as_str())));
                    i.set(i.get() + 1);
                })),
            }
        },
        Break => {
            builder.new_line_hard();
            Finished(None)
        },
        Table(tab) => {
            render_table_tree(builder.deref_mut(), tab, err_out)
        },
        TableRow(row) => {
            render_table_row(builder.deref_mut(), row, err_out)
        },
        TableBody(_) => {
            unimplemented!("Unexpected TableBody while rendering")
        },
        TableCell(cell) => {
            render_table_cell(builder.deref_mut(), cell, err_out)
        },
        FragStart(fragname) => {
            builder.record_frag_start(&fragname);
            Finished(None)
        },
    }
}
/// Lay out a table: allocate column widths from the size estimates,
/// then queue the rows (annotated with those widths) for rendering.
fn render_table_tree<T:Write, R:Renderer>(builder: &mut R, mut table: RenderTable, _err_out: &mut T) -> TreeMapResult<'static, BuilderStack<R>, RenderNode, Option<R>>
{
    /* Now lay out the table. */
    let num_columns = table.num_columns;
    /* Heuristic: scale the column widths according to how much content there is. */
    let mut col_sizes: Vec<SizeEstimate> = vec![Default::default(); num_columns];
    for row in table.rows() {
        let mut colno = 0;
        for cell in row.cells() {
            // FIXME: get_size_estimate is still recursive.
            let mut estimate = cell.get_size_estimate();
            // If the cell has a colspan>1, then spread its size between the
            // columns.
            estimate.size /= cell.colspan;
            estimate.min_width /= cell.colspan;
            for i in 0..cell.colspan {
                col_sizes[colno + i] = (col_sizes[colno + i]).add(estimate);
            }
            colno += cell.colspan;
        }
    }
    let tot_size: usize = col_sizes.iter().map(|est| est.size).sum();
    let width = builder.width();
    // Scale each column to its share of the available width, but never
    // below its minimum; empty columns get zero width.
    let mut col_widths:Vec<usize> = col_sizes.iter()
        .map(|sz| {
            if sz.size == 0 {
                0
            } else {
                max(sz.size * width / tot_size, sz.min_width)
            }
        }).collect();
    /* The minimums may have put the total width too high */
    while col_widths.iter().cloned().sum::<usize>() > width {
        // Shrink the column with the most slack over its minimum
        // (ties broken towards wider, then earlier, columns).
        let (i, _) = col_widths.iter()
            .cloned()
            .enumerate()
            .max_by_key(|&(colno, width)| (width.saturating_sub(col_sizes[colno].min_width), width, usize::max_value() - colno ))
            .unwrap();
        col_widths[i] -= 1;
    }
    if !col_widths.is_empty() {
        // Slight fudge; we're not drawing extreme edges, so one of the columns
        // can gets a free character cell from not having a border.
        // make it the last.
        let last = col_widths.len() - 1;
        col_widths[last] += 1;
    }
    builder.start_block();
    builder.add_horizontal_border();
    TreeMapResult::PendingChildren{
        children: table.into_rows(col_widths),
        cons: Box::new(|_, _| Some(None)),
        prefn: Some(Box::new(|_, _| { })),
        postfn: Some(Box::new(|_, _| { })),
    }
}
/// Render one table row: the prefn pushes a sub-renderer sized to each
/// cell's allocated width (render_table_cell pops it and returns it),
/// and the reducer glues the finished cell renderers together with
/// column borders.
fn render_table_row<T:Write, R:Renderer>(_builder: &mut R, row: RenderTableRow, _err_out: &mut T) -> TreeMapResult<'static, BuilderStack<R>, RenderNode, Option<R>>
{
    TreeMapResult::PendingChildren{
        children: row.into_cells(),
        cons: Box::new(|builders, children| {
            // Every child result is Some(sub-renderer) from render_table_cell.
            let children: Vec<_> = children.into_iter().map(Option::unwrap).collect();
            if children.iter().any(|c| !c.empty()) {
                builders.append_columns_with_borders(children, true);
            }
            Some(None)
        }),
        prefn: Some(Box::new(|builder: &mut BuilderStack<R>, node| {
            if let RenderNodeInfo::TableCell(ref cell) = node.info {
                // col_width was set by RenderTableRow::into_cells.
                let sub_builder = builder.new_sub_renderer(cell.col_width.unwrap());
                builder.push(sub_builder);
            } else {
                panic!()
            }
        })),
        postfn: Some(Box::new(|_builder: &mut BuilderStack<R>, _| {
        })),
    }
}
/// Render a single table cell's contents into the sub-renderer that
/// render_table_row's prefn pushed, then pop it and hand it back as
/// this cell's result.
fn render_table_cell<T:Write, R:Renderer>(_builder: &mut R, cell: RenderTableCell, _err_out: &mut T) -> TreeMapResult<'static, BuilderStack<R>, RenderNode, Option<R>>
{
    pending2(cell.content, |bs: &mut BuilderStack<R>, _children| {
        let finished = bs.pop();
        Some(Some(finished))
    })
}
/// Reads HTML from `input`, decorates it using `decorator`, and
/// returns a `String` with text wrapped to `width` columns.
pub fn from_read_with_decorator<R, D>
    (mut input: R, width: usize, decorator: D) -> String
    where R: io::Read, D: TextDecorator
{
    // Parse the document, dropping any doctype node.
    let opts = ParseOpts {
        tree_builder: TreeBuilderOpts {
            drop_doctype: true,
            ..Default::default()
        },
        ..Default::default()
    };
    let dom = parse_document(RcDom::default(), opts)
        .from_utf8()
        .read_from(&mut input)
        .unwrap();
    // Build the render tree, then render it at the requested width.
    let render_tree = dom_to_render_tree(dom.document.clone(), &mut Discard{}).unwrap();
    let renderer = TextRenderer::new(width, decorator);
    let renderer = render_tree_to_string(renderer, render_tree, &mut Discard{});
    renderer.into_string()
}
/// Reads HTML from `input`, and returns a `String` with text wrapped to
/// `width` columns, using the plain-text decorator.
pub fn from_read<R>(input: R, width: usize) -> String where R: io::Read {
    from_read_with_decorator(input, width, PlainDecorator::new())
}
/// Reads HTML from `input`, and returns text wrapped to `width` columns.
/// The text is returned as a `Vec<TaggedLine<_>>`; the annotations are vectors
/// of `RichAnnotation`.  The "outer" annotation comes first in the `Vec`.
pub fn from_read_rich<R>(mut input: R, width: usize) -> Vec<TaggedLine<Vec<RichAnnotation>>>
    where R: io::Read
{
    // Parse the document, dropping any doctype node.
    let opts = ParseOpts {
        tree_builder: TreeBuilderOpts {
            drop_doctype: true,
            ..Default::default()
        },
        ..Default::default()
    };
    let dom = parse_document(RcDom::default(), opts)
        .from_utf8()
        .read_from(&mut input)
        .unwrap();
    // Build and render the tree with the rich decorator, keeping the
    // per-line annotations instead of flattening to a String.
    let render_tree = dom_to_render_tree(dom.document.clone(), &mut Discard{}).unwrap();
    let renderer = TextRenderer::new(width, RichDecorator::new());
    let renderer = render_tree_to_string(renderer, render_tree, &mut Discard{});
    renderer.into_lines()
            .into_iter()
            .map(RenderLine::into_tagged_line)
            .collect()
}
#[cfg(test)]
mod tests {
use super::{from_read, from_read_with_decorator, TextDecorator};
use super::render::text_renderer::TrivialDecorator;
/// Like assert_eq!(), but prints out the results normally as well
// (multi-line table layouts are much easier to compare visually than
// in assert_eq!'s escaped Debug output).
macro_rules! assert_eq_str {
($a:expr, $b:expr) => {
if $a != $b {
println!("<<<\n{}===\n{}>>>", $a, $b);
assert_eq!($a, $b);
}
}
}
// Render `input` at `width` with the default (plain) decorator and
// compare against `expected`.
fn test_html(input: &[u8], expected: &str, width: usize) {
assert_eq_str!(from_read(input, width), expected);
}
// As test_html, but rendering with the supplied decorator.
fn test_html_decorator<D>(input: &[u8], expected: &str, width: usize, decorator: D)
where D: TextDecorator
{
let output = from_read_with_decorator(input, width, decorator);
assert_eq_str!(output, expected);
}
// Table rendering: borders, headers (thead/tbody) and colspan handling.
#[test]
fn test_table() {
test_html(br##"
<table>
<tr>
<td>1</td>
<td>2</td>
<td>3</td>
</tr>
</table>
"##, r#"───┬───┬────
1  │2  │3
───┴───┴────
"#, 12);
}
#[test]
fn test_thead() {
test_html(br##"
<table>
<thead>
<tr>
<th>Col1</th>
<th>Col2</th>
<th>Col3</th>
</tr>
</thead>
<tbody>
<tr>
<td>1</td>
<td>2</td>
<td>3</td>
</tr>
</tbody>
</table>
"##, r#"────┬────┬─────
Col1│Col2│Col3
────┼────┼─────
1   │2   │3
────┴────┴─────
"#, 15);
}
#[test]
fn test_colspan() {
test_html(br##"
<table>
<tr>
<td>1</td>
<td>2</td>
<td>3</td>
</tr>
<tr>
<td colspan="2">12</td>
<td>3</td>
</tr>
<tr>
<td>1</td>
<td colspan="2">23</td>
</tr>
</table>
"##, r#"───┬───┬────
1  │2  │3
───┴───┼────
12     │3
───┬───┴────
1  │23
───┴────────
"#, 12);
}
// Paragraphs, blockquotes and ordered/unordered lists (including the
// <ol start=...> attribute, negative starts, and prefix alignment).
#[test]
fn test_para() {
assert_eq_str!(from_read(&b"<p>Hello</p>"[..], 10),
"Hello\n");
}
#[test]
fn test_para2() {
assert_eq_str!(from_read(&b"<p>Hello, world!</p>"[..], 20),
"Hello, world!\n");
}
#[test]
fn test_blockquote() {
assert_eq_str!(from_read(&br#"<p>Hello</p>
<blockquote>One, two, three</blockquote>
<p>foo</p>
"#[..], 12), r#"Hello
> One, two,
> three
foo
"#);
}
#[test]
fn test_ul() {
test_html(br#"
<ul>
<li>Item one</li>
<li>Item two</li>
<li>Item three</li>
</ul>
"#, r#"* Item one
* Item two
* Item
three
"#, 10);
}
#[test]
fn test_ol1() {
test_html(br#"
<ol>
<li>Item one</li>
<li>Item two</li>
<li>Item three</li>
</ol>
"#, r#"1. Item one
2. Item two
3. Item
three
"#, 11);
}
#[test]
fn test_ol2() {
test_html(br#"
<ol>
<li>Item one</li>
<li>Item two</li>
<li>Item three</li>
<li>Item four</li>
<li>Item five</li>
<li>Item six</li>
<li>Item seven</li>
<li>Item eight</li>
<li>Item nine</li>
<li>Item ten</li>
</ol>
"#, r#"1.  Item one
2.  Item two
3.  Item three
4.  Item four
5.  Item five
6.  Item six
7.  Item seven
8.  Item eight
9.  Item nine
10. Item ten
"#, 20);
}
#[test]
fn test_ol_start() {
test_html(br#"
<ol start="3">
<li>Item three</li>
<li>Item four</li>
</ol>
"#, r#"3. Item three
4. Item four
"#, 20);
}
#[test]
fn test_ol_start_9() {
test_html(br#"
<ol start="9">
<li>Item nine</li>
<li>Item ten</li>
</ol>
"#, r#"9.  Item nine
10. Item ten
"#, 20);
}
#[test]
fn test_ol_start_neg() {
test_html(br#"
<ol start="-1">
<li>Item minus one</li>
<li>Item zero</li>
<li>Item one</li>
</ol>
"#, r#"-1. Item minus one
0.  Item zero
1.  Item one
"#, 20);
}
// Whitespace normalisation: newlines in the source collapse to spaces,
// including across spans, inside table cells, and around unknown tags.
#[test]
fn test_strip_nl() {
test_html(br#"
<p>
One
Two
Three
</p>
"#, "One Two Three\n", 40);
}
#[test]
fn test_strip_nl2() {
test_html(br#"
<p>
One
<span>
Two
</span>
Three
</p>
"#, "One Two Three\n", 40);
}
#[test]
fn test_strip_nl_tbl() {
test_html(br#"
<table>
<tr>
<td>
One
<span>
Two
</span>
Three
</td>
</tr>
</table>
"#, r"────────────────────
One Two Three
────────────────────
", 20);
}
#[test]
fn test_unknown_element() {
test_html(br#"
<foo>
<table>
<tr>
<td>
One
<span><yyy>
Two
</yyy></span>
Three
</td>
</tr>
</table>
</foo>
"#, r"────────────────────
One Two Three
────────────────────
", 20);
}
#[test]
fn test_strip_nl_tbl_p() {
test_html(br#"
<table>
<tr>
<td><p>
One
<span>
Two
</span>
Three
</p></td>
</tr>
</table>
"#, r"────────────────────
One Two Three
────────────────────
", 20);
}
#[test]
fn test_pre() {
test_html(br#"
<pre>foo
bar
wib asdf;
</pre>
<p>Hello</p>
"#, r"foo
bar
wib asdf;
Hello
", 20);
}
#[test]
fn test_link() {
test_html(br#"
<p>Hello, <a href="http://www.example.com/">world</a></p>"#, r"Hello, [world][1]
[1] http://www.example.com/
", 80);
}
#[test]
fn test_link2() {
test_html(br#"
<p>Hello, <a href="http://www.example.com/">world</a>!</p>"#, r"Hello, [world][1]!
[1] http://www.example.com/
", 80);
}
#[test]
fn test_link3() {
test_html(br#"
<p>Hello, <a href="http://www.example.com/">w</a>orld</p>"#, r"Hello, [w][1]orld
[1] http://www.example.com/
", 80);
}
#[test]
fn test_link_wrap() {
test_html(br#"
<a href="http://www.example.com/">Hello</a>"#, r"[Hello][1]
[1] http:/
/www.examp
le.com/
", 10);
}
#[test]
fn test_wrap() {
test_html(br"<p>Hello, world. Superlongwordreally</p>",
r#"Hello,
world.
Superlon
gwordrea
lly
"#, 8);
}
#[test]
fn test_wrap2() {
test_html(br"<p>Hello, world. This is a long sentence with a
few words, which we want to be wrapped correctly.</p>",
r#"Hello, world. This
is a long sentence
with a few words,
which we want to be
wrapped correctly.
"#, 20);
}
#[test]
fn test_wrap3() {
test_html(br#"<p><a href="dest">http://example.org/blah/</a> one two three"#,
r#"[http://example.org/blah/
][1] one two three
[1] dest
"#, 25);
}
#[test]
fn test_div() {
test_html(br"<p>Hello</p><div>Div</div>",
r#"Hello
Div
"#, 20);
test_html(br"<p>Hello</p><div>Div</div><div>Div2</div>",
r#"Hello
Div
Div2
"#, 20);
}
#[test]
fn test_img_alt() {
test_html(br"<p>Hello <img src='foo.jpg' alt='world'></p>",
"Hello [world]\n", 80);
}
#[test]
fn test_br() {
test_html(br"<p>Hello<br/>World</p>",
"Hello\nWorld\n", 20);
}
#[test]
fn test_br2() {
test_html(br"<p>Hello<br/><br/>World</p>",
"Hello\n\nWorld\n", 20);
}
#[test]
fn test_br3() {
test_html(br"<p>Hello<br/> <br/>World</p>",
"Hello\n\nWorld\n", 20);
}
#[test]
fn test_subblock() {
test_html(br#"<div>
<div>Here's a <a href="https://example.com/">link</a>.</div>
<div><ul>
<li>Bullet</li>
<li>Bullet</li>
<li>Bullet</li>
</ul></div>
</div>"#,
r"Here's a [link][1].
* Bullet
* Bullet
* Bullet
[1] https://example.com/
", 80);
}
#[test]
fn test_controlchar() {
test_html("Foo\u{0080}Bar".as_bytes(), "FooBar\n", 80);
test_html("Foo\u{0080}Bar".as_bytes(), "FooB\nar\n", 4);
test_html("FooBa\u{0080}r".as_bytes(), "FooB\nar\n", 4);
}
// A 3x3 arrangement of nested one-row tables merges into a single grid.
#[test]
fn test_nested_table_1() {
test_html(br##"
<table>
<tr>
<td>
<table><tr><td>1</td><td>2</td><td>3</td></tr></table>
</td>
<td>
<table><tr><td>4</td><td>5</td><td>6</td></tr></table>
</td>
<td>
<table><tr><td>7</td><td>8</td><td>9</td></tr></table>
</td>
</tr>
<tr>
<td>
<table><tr><td>1</td><td>2</td><td>3</td></tr></table>
</td>
<td>
<table><tr><td>4</td><td>5</td><td>6</td></tr></table>
</td>
<td>
<table><tr><td>7</td><td>8</td><td>9</td></tr></table>
</td>
</tr>
<tr>
<td>
<table><tr><td>1</td><td>2</td><td>3</td></tr></table>
</td>
<td>
<table><tr><td>4</td><td>5</td><td>6</td></tr></table>
</td>
<td>
<table><tr><td>7</td><td>8</td><td>9</td></tr></table>
</td>
</tr>
</table>
"##, r#"─┬─┬──┬─┬─┬──┬─┬─┬───
1│2│3 │4│5│6 │7│8│9
─┼─┼──┼─┼─┼──┼─┼─┼───
1│2│3 │4│5│6 │7│8│9
─┼─┼──┼─┼─┼──┼─┼─┼───
1│2│3 │4│5│6 │7│8│9
─┴─┴──┴─┴─┴──┴─┴─┴───
"#, 21);
}
// A nested table and a <pre> block share one row; the shorter cell pads out.
#[test]
fn test_nested_table_2() {
test_html(br##"
<table>
<tr>
<td>
<table>
<tr><td>1</td><td>a</td></tr>
<tr><td>2</td><td>b</td></tr>
</table>
</td>
<td><pre>one
two
three
four
five
</pre>
</td>
</tr>
</table>
"##, r#"─┬───┬─────
1│a │one
─┼───│two
2│b │three
│ │four
│ │five
─┴───┴─────
"#, 11);
}
// <h1> renders with a "# " prefix.
#[test]
fn test_h1() {
test_html(br##"
<h1>Hi</h1>
<p>foo</p>
"##, r#"# Hi
foo
"#, 21);
}
// <h3> renders with a "### " prefix.
#[test]
fn test_h3() {
test_html(br##"
<h3>Hi</h3>
<p>foo</p>
"##, r#"### Hi
foo
"#, 21);
}
// General test that spacing is preserved
#[test]
fn test_pre2() {
test_html(br##"<pre>Hello sp
world</pre>"##, r#"Hello sp
world
"#, 21);
}
// Check that spans work correctly inside <pre>
#[test]
fn test_pre_span() {
test_html(br##"
<pre>Hello <span>$</span>sp
<span>Hi</span> <span>$</span><span>foo</span>
<span>Hi</span> <span>foo</span><span>, </span><span>bar</span>
</pre>"##, r#"Hello $sp
Hi $foo
Hi foo, bar
"#, 21);
}
// Check tab behaviour
#[test]
fn test_pre_tab() {
test_html(b"<pre>\tworld</pre>", " world\n", 40);
test_html(b"<pre>H\tworld</pre>", "H world\n", 40);
test_html(b"<pre>He\tworld</pre>", "He world\n", 40);
test_html(b"<pre>Hel\tworld</pre>", "Hel world\n", 40);
test_html(b"<pre>Hell\tworld</pre>", "Hell world\n", 40);
test_html(b"<pre>Hello\tworld</pre>", "Hello world\n", 40);
test_html(b"<pre>Helloo\tworld</pre>", "Helloo world\n", 40);
test_html(b"<pre>Hellooo\tworld</pre>", "Hellooo world\n", 40);
test_html(b"<pre>Helloooo\tworld</pre>", "Helloooo world\n", 40);
}
// <em> wraps in single asterisks, <strong> in double asterisks.
#[test]
fn test_em_strong() {
test_html(br##"
<p>Hi <em>em</em> <strong>strong</strong></p>
"##, r#"Hi *em* **strong**
"#, 21);
}
// NBSP-based indentation is not yet honoured; kept #[ignore]d as a reminder.
#[test]
#[ignore] // Not yet fixed!
fn test_nbsp_indent() {
test_html(br##"
<div>Top</div>
<div> Indented</div>
<div> Indented again</div>
"##, r#"Top
Indented
Indented again
"#, 21);
}
// 10k unclosed tags: deep nesting must not overflow the stack or panic.
#[test]
fn test_deeply_nested() {
use ::std::iter::repeat;
let html = repeat("<foo>")
.take(10000)
.collect::<Vec<_>>()
.concat();
test_html(html.as_bytes(), "", 10);
}
// Deeply nested (and unbalanced) tables must render without crashing.
#[test]
fn test_deeply_nested_table() {
use ::std::iter::repeat;
let html = repeat("<table><tr><td>hi</td><td>")
.take(10000)
.collect::<Vec<_>>()
.concat()
+ &repeat("</td></tr></table>")
.take(1000)
.collect::<Vec<_>>()
.concat();
test_html(html.as_bytes(), r#"────┬─┬───
hi │h│
│i│
────┴─┴───
"#, 10);
}
// Baseline: a plain table with no id attributes renders normally.
#[test]
fn test_table_no_id() {
let html = r#"<html><body><table>
<tr>
<td>hi, world</td>
</tr>
</table></body></html>"#;
test_html(html.as_bytes(), r#"──────────
hi, world
──────────
"#, 10);
}
// An id on the <td> must not change the rendering.
#[test]
fn test_table_cell_id() {
let html = r#"<html><body><table>
<tr>
<td id="bodyCell">hi, world</td>
</tr>
</table></body></html>"#;
test_html(html.as_bytes(), r#"──────────
hi, world
──────────
"#, 10);
}
// An id on the <tr> must not change the rendering.
#[test]
fn test_table_row_id() {
let html = r#"<html><body><table>
<tr id="bodyrow">
<td>hi, world</td>
</tr>
</table></body></html>"#;
test_html(html.as_bytes(), r#"──────────
hi, world
──────────
"#, 10);
}
// An id on the <table> must not change the rendering.
#[test]
fn test_table_table_id() {
let html = r#"<html><body><table id="bodytable">
<tr>
<td>hi, world</td>
</tr>
</table></body></html>"#;
test_html(html.as_bytes(), r#"──────────
hi, world
──────────
"#, 10);
}
// Nested headers at tiny widths: the available width can hit zero or
// underflow; output must stay well-formed instead of panicking.
#[test]
fn test_header_width() {
//0 size
test_html(
br##"
<h2>
<table>
<h3>Anything</h3>
</table>
</h2>
"##,
r#"## ### A
## ### n
## ### y
## ### t
## ### h
## ### i
## ### n
## ### g
##
## ────
"#,
7,
);
//Underflow
test_html(
br##"
<h2>
<table>
<h3>Anything</h3>
</table>
</h2>
"##,
r#"## ### A
## ### n
## ### y
## ### t
## ### h
## ### i
## ### n
## ### g
##
## ──
"#,
5,
);
}
// TrivialDecorator drops link markers and footnotes, keeping plain text.
#[test]
fn test_trivial_decorator() {
test_html_decorator(br#"<div>
<div>Here's a <a href="https://example.com/">link</a>.</div>
<div><ul>
<li>Bullet</li>
<li>Bullet</li>
<li>Bullet</li>
</ul></div>
</div>"#,
r"Here's a link.
* Bullet
* Bullet
* Bullet
", 80, TrivialDecorator::new());
}
// Regression test for issue #16: an HTML comment as the only list content.
#[test]
fn test_issue_16()
{
test_html(b"<ul><li><!----></li></ul>", "", 10);
}
}
|
//! A formatted and aligned table printer written in rust
extern crate unicode_width;
extern crate term;
extern crate atty;
#[macro_use] extern crate lazy_static;
use std::io;
use std::io::{Write, Error};
use std::fmt;
use std::iter::{FromIterator, IntoIterator};
use std::ops::{Index, IndexMut};
use std::mem::transmute;
use term::{Terminal, stdout};
pub mod cell;
pub mod row;
pub mod format;
mod utils;
use row::Row;
use cell::Cell;
use format::{TableFormat, LinePosition, consts};
use utils::StringWriter;
/// An owned printable table
///
/// Rows and optional titles are stored by value; `format` selects the
/// separators and borders used when the table is printed.
#[derive(Clone, Debug)]
pub struct Table {
// NOTE(review): `format` and `titles` appear to be boxed so that `&Table`
// can be reinterpreted as `&TableSlice` in the unsafe `AsRef` impl below
// (a `Box<T>` field read as a `&T` field) — confirm before changing.
format: Box<TableFormat>,
titles: Box<Option<Row>>,
rows: Vec<Row>
}
/// A borrowed immutable `Table` slice
/// A `TableSlice` is obtained by slicing a `Table` with the `Slice::slice` method.
///
/// # Examples
/// ```rust
/// # #[macro_use] extern crate prettytable;
/// use prettytable::{Table, Slice};
/// # fn main() {
/// let table = table![[1, 2, 3], [4, 5, 6], [7, 8, 9]];
/// let slice = table.slice(1..);
/// slice.printstd(); // Prints only rows 1 and 2
///
/// //Also supports other syntax :
/// table.slice(..);
/// table.slice(..2);
/// table.slice(1..3);
/// # }
/// ```
///
#[derive(Clone, Debug)]
pub struct TableSlice<'a> {
// Format borrowed from the sliced `Table` (see `Slice::slice`)
format: &'a TableFormat,
// Titles borrowed from the sliced `Table`
titles: &'a Option<Row>,
rows: &'a [Row]
}
impl <'a> TableSlice<'a> {
/// Compute and return the number of column
pub fn get_column_num(&self) -> usize {
let mut cnum = 0;
for r in self.rows {
let l = r.len();
if l > cnum {
cnum = l;
}
}
return cnum;
}
/// Get the number of rows
pub fn len(&self) -> usize {
return self.rows.len();
}
/// Get an immutable reference to a row
pub fn get_row(&self, row: usize) -> Option<&Row> {
return self.rows.get(row);
}
/// Get the width of the column at position `col_idx`.
/// Return 0 if the column does not exists;
fn get_column_width(&self, col_idx: usize) -> usize {
let mut width = match *self.titles {
Some(ref t) => t.get_cell_width(col_idx),
None => 0
};
for r in self.rows {
let l = r.get_cell_width(col_idx);
if l > width {
width = l;
}
}
return width;
}
/// Get the width of all columns, and return a slice
/// with the result for each column
fn get_all_column_width(&self) -> Vec<usize> {
let colnum = self.get_column_num();
let mut col_width = vec![0usize; colnum];
for i in 0..colnum {
col_width[i] = self.get_column_width(i);
}
return col_width;
}
/// Return an iterator over the immutable cells of the column specified by `column`
pub fn column_iter(&self, column: usize) -> ColumnIter {
return ColumnIter(self.rows.iter(), column);
}
/// Internal only
fn __print<T: Write+?Sized, F>(&self, out: &mut T, f: F) -> Result<(), Error>
where F: Fn(&Row, &mut T, &TableFormat, &[usize]) -> Result<(), Error> {
// Compute columns width
let col_width = self.get_all_column_width();
try!(self.format.print_line_separator(out, &col_width, LinePosition::Top));
if let Some(ref t) = *self.titles {
try!(f(t, out, &self.format, &col_width));
try!(self.format.print_line_separator(out, &col_width, LinePosition::Title));
}
// Print rows
let mut iter = self.rows.into_iter().peekable();
while let Some(r) = iter.next() {
try!(f(r, out, &self.format, &col_width));
if iter.peek().is_some() {
try!(self.format.print_line_separator(out, &col_width, LinePosition::Intern));
}
}
try!(self.format.print_line_separator(out, &col_width, LinePosition::Bottom));
return out.flush();
}
/// Print the table to `out`
pub fn print<T: Write+?Sized>(&self, out: &mut T) -> Result<(), Error> {
return self.__print(out, Row::print);
}
/// Print the table to terminal `out`, applying styles when needed
pub fn print_term<T: Terminal+?Sized>(&self, out: &mut T) -> Result<(), Error> {
return self.__print(out, Row::print_term);
}
/// Print the table to standard output. Colors won't be displayed unless
/// stdout is a tty terminal, or `force_colorize` is set to `true`.
/// In ANSI terminals, colors are displayed using ANSI escape characters. When for example the
/// output is redirected to a file, or piped to another program, the output is considered
/// as not beeing tty, and ANSI escape characters won't be displayed unless `force colorize`
/// is set to `true`.
/// # Panic
/// Panic if writing to standard output fails
pub fn print_tty(&self, force_colorize: bool) {
let r = match (stdout(), atty::is() || force_colorize) {
(Some(mut o), true) => self.print_term(&mut *o),
_ => self.print(&mut io::stdout()),
};
if let Err(e) = r {
panic!("Cannot print table to standard output : {}", e);
}
}
/// Print the table to standard output. Colors won't be displayed unless
/// stdout is a tty terminal. This means that if stdout is redirected to a file, or piped
/// to another program, no color will be displayed.
/// To force colors rendering, use `print_tty()` method.
/// Calling `printstd()` is equivalent to calling `print_tty(false)`
/// # Panic
/// Panic if writing to standard output fails
pub fn printstd(&self) {
self.print_tty(false);
}
}
impl Table {
/// Create an empty table
pub fn new() -> Table {
return Self::init(Vec::new());
}
/// Create a table initialized with `rows`
pub fn init(rows: Vec<Row>) -> Table {
return Table {
rows: rows,
titles: Box::new(None),
format: Box::new(*consts::FORMAT_DEFAULT)
};
}
/// Change the table format. Eg : Separators
pub fn set_format(&mut self, format: TableFormat) {
*self.format = format;
}
/// Compute and return the number of column
pub fn get_column_num(&self) -> usize {
return self.as_ref().get_column_num();
}
/// Get the number of rows
pub fn len(&self) -> usize {
return self.rows.len();
}
/// Set the optional title lines
pub fn set_titles(&mut self, titles: Row) {
*self.titles = Some(titles);
}
/// Unset the title line
pub fn unset_titles(&mut self) {
*self.titles = None;
}
/// Get a mutable reference to a row
pub fn get_mut_row(&mut self, row: usize) -> Option<&mut Row> {
return self.rows.get_mut(row);
}
/// Get an immutable reference to a row
pub fn get_row(&self, row: usize) -> Option<&Row> {
return self.rows.get(row);
}
/// Append a row in the table, transferring ownership of this row to the table
/// and returning a mutable reference to the row
pub fn add_row(&mut self, row: Row) -> &mut Row {
self.rows.push(row);
let l = self.rows.len()-1;
return &mut self.rows[l];
}
/// Append an empty row in the table. Return a mutable reference to this new row.
pub fn add_empty_row(&mut self) -> &mut Row {
return self.add_row(Row::default());
}
/// Insert `row` at the position `index`, and return a mutable reference to this row.
/// If index is higher than current numbers of rows, `row` is appended at the end of the table
pub fn insert_row(&mut self, index: usize, row: Row) -> &mut Row {
if index < self.rows.len() {
self.rows.insert(index, row);
return &mut self.rows[index];
} else {
return self.add_row(row);
}
}
/// Modify a single element in the table
pub fn set_element(&mut self, element: &str, column: usize, row: usize) -> Result<(), &str> {
let rowline = try!(self.get_mut_row(row).ok_or("Cannot find row"));
// TODO: If a cell already exist, copy it's alignment parameter
return rowline.set_cell(Cell::new(element), column);
}
/// Remove the row at position `index`. Silently skip if the row does not exist
pub fn remove_row(&mut self, index: usize) {
if index < self.rows.len() {
self.rows.remove(index);
}
}
/// Return an iterator over the immutable cells of the column specified by `column`
pub fn column_iter(&self, column: usize) -> ColumnIter {
return ColumnIter(self.rows.iter(), column);
}
/// Return an iterator over the mutable cells of the column specified by `column`
pub fn column_iter_mut(&mut self, column: usize) -> ColumnIterMut {
return ColumnIterMut(self.rows.iter_mut(), column);
}
/// Print the table to `out`
pub fn print<T: Write+?Sized>(&self, out: &mut T) -> Result<(), Error> {
return self.as_ref().print(out);
}
/// Print the table to terminal `out`, applying styles when needed
pub fn print_term<T: Terminal+?Sized>(&self, out: &mut T) -> Result<(), Error> {
return self.as_ref().print_term(out);
}
/// Print the table to standard output. Colors won't be displayed unless
/// stdout is a tty terminal, or `force_colorize` is set to `true`.
/// In ANSI terminals, colors are displayed using ANSI escape characters. When for example the
/// output is redirected to a file, or piped to another program, the output is considered
/// as not beeing tty, and ANSI escape characters won't be displayed unless `force colorize`
/// is set to `true`.
/// # Panic
/// Panic if writing to standard output fails
pub fn print_tty(&self, force_colorize: bool) {
self.as_ref().print_tty(force_colorize);
}
/// Print the table to standard output. Colors won't be displayed unless
/// stdout is a tty terminal. This means that if stdout is redirected to a file, or piped
/// to another program, no color will be displayed.
/// To force colors rendering, use `print_tty()` method.
/// Calling `printstd()` is equivalent to calling `print_tty(false)`
/// # Panic
/// Panic if writing to standard output fails
pub fn printstd(&self) {
self.as_ref().printstd();
}
}
// Indexing a table (or slice) by row number; panics when out of bounds.
impl Index<usize> for Table {
    type Output = Row;
    fn index(&self, idx: usize) -> &Self::Output {
        &self.rows[idx]
    }
}

impl<'a> Index<usize> for TableSlice<'a> {
    type Output = Row;
    fn index(&self, idx: usize) -> &Self::Output {
        &self.rows[idx]
    }
}

impl IndexMut<usize> for Table {
    fn index_mut(&mut self, idx: usize) -> &mut Self::Output {
        &mut self.rows[idx]
    }
}
impl fmt::Display for Table {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        // Delegate to the `TableSlice` implementation below.
        self.as_ref().fmt(fmt)
    }
}

impl<'a> fmt::Display for TableSlice<'a> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        // Render into an in-memory writer first, then hand the text over;
        // any print failure is mapped to the opaque `fmt::Error`.
        let mut writer = StringWriter::new();
        if self.print(&mut writer).is_err() {
            return Err(fmt::Error);
        }
        fmt.write_str(writer.as_string())
    }
}
// Build a table from any iterable of iterables of printable values;
// each inner iterable becomes one row.
impl<B: ToString, A: IntoIterator<Item = B>> FromIterator<A> for Table {
    fn from_iter<T>(iterator: T) -> Table
        where T: IntoIterator<Item = A>
    {
        Self::init(iterator.into_iter().map(Row::from).collect())
    }
}

impl<T, A, B> From<T> for Table
    where B: ToString,
          A: IntoIterator<Item = B>,
          T: IntoIterator<Item = A>
{
    fn from(it: T) -> Table {
        Self::from_iter(it)
    }
}
/// Iterator over immutable cells in a column
pub struct ColumnIter<'a>(std::slice::Iter<'a, Row>, usize);

impl<'a> std::iter::Iterator for ColumnIter<'a> {
    type Item = &'a Cell;
    fn next(&mut self) -> Option<&'a Cell> {
        // A row lacking the requested column yields `None`, exactly like
        // the original explicit match.
        let column = self.1;
        self.0.next().and_then(|row| row.get_cell(column))
    }
}

/// Iterator over mutable cells in a column
pub struct ColumnIterMut<'a>(std::slice::IterMut<'a, Row>, usize);

impl<'a> std::iter::Iterator for ColumnIterMut<'a> {
    type Item = &'a mut Cell;
    fn next(&mut self) -> Option<&'a mut Cell> {
        let column = self.1;
        self.0.next().and_then(move |row| row.get_mut_cell(column))
    }
}
// Identity conversion: a slice is already a slice.
impl <'a> AsRef<TableSlice<'a>> for TableSlice<'a> {
fn as_ref(&self) -> &TableSlice<'a> {
return self;
}
}
// Reinterpret a `&Table` as a `&TableSlice` without building a new value.
//
// NOTE(review): this relies on `Table { Box<TableFormat>, Box<Option<Row>>,
// Vec<Row> }` and `TableSlice { &TableFormat, &Option<Row>, &[Row] }` having
// compatible in-memory layouts, which Rust does not guarantee for default
// (#[repr(Rust)]) structs — TODO confirm, or replace with a safe constructor.
// The `shrink_to_fit` call presumably exists so the Vec's capacity equals its
// length before the bytes are reread as a slice fat pointer — verify.
// NOTE(review): casting `&self` to `*mut Table` and mutating through it is
// undefined behaviour under Rust's aliasing rules; a method returning an
// owned `TableSlice` would avoid the whole hack.
impl <'a> AsRef<TableSlice<'a>> for Table {
fn as_ref(&self) -> &TableSlice<'a> {
return unsafe {
// All this is a bit hacky. Let's try to find something else
let s = &mut *((self as *const Table) as *mut Table);
s.rows.shrink_to_fit();
return transmute(self);
};
}
}
/// Trait implemented by types which can be sliced
pub trait Slice<'a, E> {
    /// Type output after slicing
    type Output: 'a;
    /// Get a slice from self
    fn slice(&'a self, arg: E) -> Self::Output;
}

// Anything viewable as a `TableSlice` can be sliced with any range type
// that `[Row]` itself accepts.
impl<'a, T, E> Slice<'a, E> for T
    where T: AsRef<TableSlice<'a>>,
          [Row]: Index<E, Output = [Row]>
{
    type Output = TableSlice<'a>;
    fn slice(&'a self, arg: E) -> Self::Output {
        let whole = self.as_ref();
        // Borrow format and titles unchanged; narrow only the row range.
        TableSlice {
            format: whole.format,
            titles: whole.titles,
            rows: whole.rows.index(arg),
        }
    }
}
/// Create a table filled with some values
///
/// All the arguments used for elements must implement the `std::string::ToString` trait
/// # Syntax
/// ```text
/// table!([Element1_ row1, Element2_ row1, ...], [Element1_row2, ...], ...);
/// ```
///
/// # Example
/// ```
/// # #[macro_use] extern crate prettytable;
/// # fn main() {
/// // Create a table initialized with some rows :
/// let tab = table!(["Element1", "Element2", "Element3"],
/// [1, 2, 3],
/// ["A", "B", "C"]
/// );
/// # drop(tab);
/// # }
/// ```
///
/// Some style can also be given in table creation
///
/// ```
/// # #[macro_use] extern crate prettytable;
/// # fn main() {
/// let tab = table!([FrByl->"Element1", Fgc->"Element2", "Element3"],
/// [FrBy => 1, 2, 3],
/// ["A", "B", "C"]
/// );
/// # drop(tab);
/// # }
/// ```
///
/// For details about style specifier syntax, check doc for [Cell::style_spec](cell/struct.Cell.html#method.style_spec) method
#[macro_export]
macro_rules! table {
// Each bracketed group expands to one `row!` invocation; the resulting
// rows are collected into a `Vec` and handed to `Table::init`.
($([$($content:tt)*]), *) => (
$crate::Table::init(vec![$(row![$($content)*]), *])
);
}
/// Create a table with `table!` macro, print it to standard output, then return this table for future usage.
///
/// The syntax is the same that the one for the `table!` macro
#[macro_export]
macro_rules! ptable {
($($content:tt)*) => (
{
// Build, print (via `printstd`), then hand the table back to the caller.
let tab = table!($($content)*);
tab.printstd();
tab
}
);
}
#[cfg(test)]
mod tests {
use Table;
use Slice;
use row::Row;
use cell::Cell;
use format::consts::{FORMAT_NO_LINESEP, FORMAT_NO_COLSEP, FORMAT_CLEAN};
// Default format: full borders with a '=' separator under the titles.
#[test]
fn table() {
let mut table = Table::new();
table.add_row(Row::new(vec![Cell::new("a"), Cell::new("bc"), Cell::new("def")]));
table.add_row(Row::new(vec![Cell::new("def"), Cell::new("bc"), Cell::new("a")]));
table.set_titles(Row::new(vec![Cell::new("t1"), Cell::new("t2"), Cell::new("t3")]));
let out = "\
+-----+----+-----+
| t1 | t2 | t3 |
+=====+====+=====+
| a | bc | def |
+-----+----+-----+
| def | bc | a |
+-----+----+-----+
";
assert_eq!(table.to_string().replace("\r\n", "\n"), out);
}
// Index syntax reads and writes cells; the column re-pads to the new width.
#[test]
fn index() {
let mut table = Table::new();
table.add_row(Row::new(vec![Cell::new("a"), Cell::new("bc"), Cell::new("def")]));
table.add_row(Row::new(vec![Cell::new("def"), Cell::new("bc"), Cell::new("a")]));
table.set_titles(Row::new(vec![Cell::new("t1"), Cell::new("t2"), Cell::new("t3")]));
assert_eq!(table[1][1].get_content(), "bc");
table[1][1] = Cell::new("newval");
assert_eq!(table[1][1].get_content(), "newval");
let out = "\
+-----+--------+-----+
| t1 | t2 | t3 |
+=====+========+=====+
| a | bc | def |
+-----+--------+-----+
| def | newval | a |
+-----+--------+-----+
";
assert_eq!(table.to_string().replace("\r\n", "\n"), out);
}
// FORMAT_NO_LINESEP drops horizontal separators but keeps column pipes.
#[test]
fn no_linesep() {
let mut table = Table::new();
table.set_format(*FORMAT_NO_LINESEP);
table.add_row(Row::new(vec![Cell::new("a"), Cell::new("bc"), Cell::new("def")]));
table.add_row(Row::new(vec![Cell::new("def"), Cell::new("bc"), Cell::new("a")]));
table.set_titles(Row::new(vec![Cell::new("t1"), Cell::new("t2"), Cell::new("t3")]));
assert_eq!(table[1][1].get_content(), "bc");
table[1][1] = Cell::new("newval");
assert_eq!(table[1][1].get_content(), "newval");
let out = "\
| t1 | t2 | t3 |
| a | bc | def |
| def | newval | a |
";
assert_eq!(table.to_string().replace("\r\n", "\n"), out);
}
// FORMAT_NO_COLSEP drops column pipes but keeps horizontal separators.
#[test]
fn no_colsep() {
let mut table = Table::new();
table.set_format(*FORMAT_NO_COLSEP);
table.add_row(Row::new(vec![Cell::new("a"), Cell::new("bc"), Cell::new("def")]));
table.add_row(Row::new(vec![Cell::new("def"), Cell::new("bc"), Cell::new("a")]));
table.set_titles(Row::new(vec![Cell::new("t1"), Cell::new("t2"), Cell::new("t3")]));
assert_eq!(table[1][1].get_content(), "bc");
table[1][1] = Cell::new("newval");
assert_eq!(table[1][1].get_content(), "newval");
let out = "\
------------------
t1 t2 t3 \n\
==================
a bc def \n\
------------------
def newval a \n\
------------------
";
println!("{}", out);
println!("____");
println!("{}", table.to_string().replace("\r\n", "\n"));
assert_eq!(table.to_string().replace("\r\n", "\n"), out);
}
// FORMAT_CLEAN removes every separator, leaving only aligned text.
#[test]
fn clean() {
let mut table = Table::new();
table.set_format(*FORMAT_CLEAN);
table.add_row(Row::new(vec![Cell::new("a"), Cell::new("bc"), Cell::new("def")]));
table.add_row(Row::new(vec![Cell::new("def"), Cell::new("bc"), Cell::new("a")]));
table.set_titles(Row::new(vec![Cell::new("t1"), Cell::new("t2"), Cell::new("t3")]));
assert_eq!(table[1][1].get_content(), "bc");
table[1][1] = Cell::new("newval");
assert_eq!(table[1][1].get_content(), "newval");
let out = "\
\u{0020}t1 t2 t3 \n\
\u{0020}a bc def \n\
\u{0020}def newval a \n\
";
println!("{}", out);
println!("____");
println!("{}", table.to_string().replace("\r\n", "\n"));
assert_eq!(out, table.to_string().replace("\r\n", "\n"));
}
// Chained slicing composes; `slice(1..4)` must equal the chained result.
#[test]
fn slices() {
let mut table = Table::new();
table.set_titles(Row::new(vec![Cell::new("t1"), Cell::new("t2"), Cell::new("t3")]));
table.add_row(Row::new(vec![Cell::new("0"), Cell::new("0"), Cell::new("0")]));
table.add_row(Row::new(vec![Cell::new("1"), Cell::new("1"), Cell::new("1")]));
table.add_row(Row::new(vec![Cell::new("2"), Cell::new("2"), Cell::new("2")]));
table.add_row(Row::new(vec![Cell::new("3"), Cell::new("3"), Cell::new("3")]));
table.add_row(Row::new(vec![Cell::new("4"), Cell::new("4"), Cell::new("4")]));
table.add_row(Row::new(vec![Cell::new("5"), Cell::new("5"), Cell::new("5")]));
let out = "\
+----+----+----+
| t1 | t2 | t3 |
+====+====+====+
| 1 | 1 | 1 |
+----+----+----+
| 2 | 2 | 2 |
+----+----+----+
| 3 | 3 | 3 |
+----+----+----+
";
let slice = table.slice(..);
let slice = slice.slice(1..);
let slice = slice.slice(..3);
assert_eq!(out, slice.to_string().replace("\r\n", "\n"));
assert_eq!(out, table.slice(1..4).to_string().replace("\r\n", "\n"));
}
}
// Added github forkme ribbon to rustdoc
//! <a href="https://github.com/phsym/prettytable-rs"><img style="position: absolute; top: 0; left: 0; border: 0;" src="https://camo.githubusercontent.com/121cd7cbdc3e4855075ea8b558508b91ac463ac2/68747470733a2f2f73332e616d617a6f6e6177732e636f6d2f6769746875622f726962626f6e732f666f726b6d655f6c6566745f677265656e5f3030373230302e706e67" alt="Fork me on GitHub" data-canonical-src="https://s3.amazonaws.com/github/ribbons/forkme_left_green_007200.png"></a>
//! <style>.sidebar { margin-top: 53px }</style>
//! A formatted and aligned table printer written in rust
extern crate unicode_width;
extern crate term;
extern crate atty;
#[macro_use] extern crate lazy_static;
use std::io;
use std::io::{Write, Error};
use std::fmt;
use std::iter::{FromIterator, IntoIterator};
use std::ops::{Index, IndexMut};
use std::mem::transmute;
use term::{Terminal, stdout};
pub mod cell;
pub mod row;
pub mod format;
mod utils;
use row::Row;
use cell::Cell;
use format::{TableFormat, LinePosition, consts};
use utils::StringWriter;
/// An owned printable table
///
/// Rows and optional titles are stored by value; `format` selects the
/// separators and borders used when the table is printed.
#[derive(Clone, Debug)]
pub struct Table {
// NOTE(review): `format` and `titles` appear to be boxed so that `&Table`
// can be reinterpreted as `&TableSlice` elsewhere in this crate (a `Box<T>`
// field read as a `&T` field) — confirm before changing.
format: Box<TableFormat>,
titles: Box<Option<Row>>,
rows: Vec<Row>
}
/// A borrowed immutable `Table` slice
/// A `TableSlice` is obtained by slicing a `Table` with the `Slice::slice` method.
///
/// # Examples
/// ```rust
/// # #[macro_use] extern crate prettytable;
/// use prettytable::{Table, Slice};
/// # fn main() {
/// let table = table![[1, 2, 3], [4, 5, 6], [7, 8, 9]];
/// let slice = table.slice(1..);
/// slice.printstd(); // Prints only rows 1 and 2
///
/// //Also supports other syntax :
/// table.slice(..);
/// table.slice(..2);
/// table.slice(1..3);
/// # }
/// ```
///
#[derive(Clone, Debug)]
pub struct TableSlice<'a> {
// Format borrowed from the sliced `Table` (see `Slice::slice`)
format: &'a TableFormat,
// Titles borrowed from the sliced `Table`
titles: &'a Option<Row>,
rows: &'a [Row]
}
impl <'a> TableSlice<'a> {
/// Compute and return the number of column
pub fn get_column_num(&self) -> usize {
let mut cnum = 0;
for r in self.rows {
let l = r.len();
if l > cnum {
cnum = l;
}
}
return cnum;
}
/// Get the number of rows
pub fn len(&self) -> usize {
return self.rows.len();
}
/// Get an immutable reference to a row
pub fn get_row(&self, row: usize) -> Option<&Row> {
return self.rows.get(row);
}
/// Get the width of the column at position `col_idx`.
/// Return 0 if the column does not exists;
fn get_column_width(&self, col_idx: usize) -> usize {
let mut width = match *self.titles {
Some(ref t) => t.get_cell_width(col_idx),
None => 0
};
for r in self.rows {
let l = r.get_cell_width(col_idx);
if l > width {
width = l;
}
}
return width;
}
/// Get the width of all columns, and return a slice
/// with the result for each column
fn get_all_column_width(&self) -> Vec<usize> {
let colnum = self.get_column_num();
let mut col_width = vec![0usize; colnum];
for i in 0..colnum {
col_width[i] = self.get_column_width(i);
}
return col_width;
}
/// Return an iterator over the immutable cells of the column specified by `column`
pub fn column_iter(&self, column: usize) -> ColumnIter {
return ColumnIter(self.rows.iter(), column);
}
/// Internal only
fn __print<T: Write+?Sized, F>(&self, out: &mut T, f: F) -> Result<(), Error>
where F: Fn(&Row, &mut T, &TableFormat, &[usize]) -> Result<(), Error> {
// Compute columns width
let col_width = self.get_all_column_width();
try!(self.format.print_line_separator(out, &col_width, LinePosition::Top));
if let Some(ref t) = *self.titles {
try!(f(t, out, &self.format, &col_width));
try!(self.format.print_line_separator(out, &col_width, LinePosition::Title));
}
// Print rows
let mut iter = self.rows.into_iter().peekable();
while let Some(r) = iter.next() {
try!(f(r, out, &self.format, &col_width));
if iter.peek().is_some() {
try!(self.format.print_line_separator(out, &col_width, LinePosition::Intern));
}
}
try!(self.format.print_line_separator(out, &col_width, LinePosition::Bottom));
return out.flush();
}
/// Print the table to `out`
pub fn print<T: Write+?Sized>(&self, out: &mut T) -> Result<(), Error> {
return self.__print(out, Row::print);
}
/// Print the table to terminal `out`, applying styles when needed
pub fn print_term<T: Terminal+?Sized>(&self, out: &mut T) -> Result<(), Error> {
return self.__print(out, Row::print_term);
}
/// Print the table to standard output. Colors won't be displayed unless
/// stdout is a tty terminal, or `force_colorize` is set to `true`.
/// In ANSI terminals, colors are displayed using ANSI escape characters. When for example the
/// output is redirected to a file, or piped to another program, the output is considered
/// as not beeing tty, and ANSI escape characters won't be displayed unless `force colorize`
/// is set to `true`.
/// # Panic
/// Panic if writing to standard output fails
pub fn print_tty(&self, force_colorize: bool) {
let r = match (stdout(), atty::is() || force_colorize) {
(Some(mut o), true) => self.print_term(&mut *o),
_ => self.print(&mut io::stdout()),
};
if let Err(e) = r {
panic!("Cannot print table to standard output : {}", e);
}
}
/// Print the table to standard output. Colors won't be displayed unless
/// stdout is a tty terminal. This means that if stdout is redirected to a file, or piped
/// to another program, no color will be displayed.
/// To force colors rendering, use `print_tty()` method.
/// Calling `printstd()` is equivalent to calling `print_tty(false)`
/// # Panic
/// Panic if writing to standard output fails
pub fn printstd(&self) {
self.print_tty(false);
}
}
impl Table {
    /// Create an empty table
    pub fn new() -> Table {
        Self::init(Vec::new())
    }
    /// Create a table initialized with `rows`
    pub fn init(rows: Vec<Row>) -> Table {
        Table {
            rows: rows,
            titles: Box::new(None),
            format: Box::new(*consts::FORMAT_DEFAULT)
        }
    }
    /// Change the table format. Eg : Separators
    pub fn set_format(&mut self, format: TableFormat) {
        *self.format = format;
    }
    /// Compute and return the number of column
    pub fn get_column_num(&self) -> usize {
        self.as_ref().get_column_num()
    }
    /// Get the number of rows
    pub fn len(&self) -> usize {
        self.rows.len()
    }
    /// Set the optional title lines
    pub fn set_titles(&mut self, titles: Row) {
        *self.titles = Some(titles);
    }
    /// Unset the title line
    pub fn unset_titles(&mut self) {
        *self.titles = None;
    }
    /// Get a mutable reference to a row
    pub fn get_mut_row(&mut self, row: usize) -> Option<&mut Row> {
        self.rows.get_mut(row)
    }
    /// Get an immutable reference to a row
    pub fn get_row(&self, row: usize) -> Option<&Row> {
        self.rows.get(row)
    }
    /// Append a row in the table, transferring ownership of this row to the table
    /// and returning a mutable reference to the row
    pub fn add_row(&mut self, row: Row) -> &mut Row {
        self.rows.push(row);
        let l = self.rows.len()-1;
        &mut self.rows[l]
    }
    /// Append an empty row in the table. Return a mutable reference to this new row.
    pub fn add_empty_row(&mut self) -> &mut Row {
        self.add_row(Row::default())
    }
    /// Insert `row` at the position `index`, and return a mutable reference to this row.
    /// If index is higher than current numbers of rows, `row` is appended at the end of the table
    pub fn insert_row(&mut self, index: usize, row: Row) -> &mut Row {
        if index < self.rows.len() {
            self.rows.insert(index, row);
            &mut self.rows[index]
        } else {
            self.add_row(row)
        }
    }
    /// Modify a single element in the table. Returns an error string if `row`
    /// does not exist.
    pub fn set_element(&mut self, element: &str, column: usize, row: usize) -> Result<(), &str> {
        let rowline = try!(self.get_mut_row(row).ok_or("Cannot find row"));
        // TODO: If a cell already exist, copy it's alignment parameter
        rowline.set_cell(Cell::new(element), column)
    }
    /// Remove the row at position `index`. Silently skip if the row does not exist
    pub fn remove_row(&mut self, index: usize) {
        if index < self.rows.len() {
            self.rows.remove(index);
        }
    }
    /// Return an iterator over the immutable cells of the column specified by `column`
    pub fn column_iter(&self, column: usize) -> ColumnIter {
        ColumnIter(self.rows.iter(), column)
    }
    /// Return an iterator over the mutable cells of the column specified by `column`
    pub fn column_iter_mut(&mut self, column: usize) -> ColumnIterMut {
        ColumnIterMut(self.rows.iter_mut(), column)
    }
    /// Print the table to `out`
    pub fn print<T: Write+?Sized>(&self, out: &mut T) -> Result<(), Error> {
        self.as_ref().print(out)
    }
    /// Print the table to terminal `out`, applying styles when needed
    pub fn print_term<T: Terminal+?Sized>(&self, out: &mut T) -> Result<(), Error> {
        self.as_ref().print_term(out)
    }
    /// Print the table to standard output. Colors won't be displayed unless
    /// stdout is a tty terminal, or `force_colorize` is set to `true`.
    /// In ANSI terminals, colors are displayed using ANSI escape characters. When for example the
    /// output is redirected to a file, or piped to another program, the output is considered
    /// as not being tty, and ANSI escape characters won't be displayed unless `force colorize`
    /// is set to `true`.
    /// # Panic
    /// Panic if writing to standard output fails
    pub fn print_tty(&self, force_colorize: bool) {
        self.as_ref().print_tty(force_colorize);
    }
    /// Print the table to standard output. Colors won't be displayed unless
    /// stdout is a tty terminal. This means that if stdout is redirected to a file, or piped
    /// to another program, no color will be displayed.
    /// To force colors rendering, use `print_tty()` method.
    /// Calling `printstd()` is equivalent to calling `print_tty(false)`
    /// # Panic
    /// Panic if writing to standard output fails
    pub fn printstd(&self) {
        self.as_ref().printstd();
    }
}
impl Index<usize> for Table {
    type Output = Row;
    /// Panicking positional access: `table[i]` borrows row `i`.
    fn index(&self, idx: usize) -> &Self::Output {
        &self.rows[idx]
    }
}
impl <'a> Index<usize> for TableSlice<'a> {
    type Output = Row;
    /// Panicking positional access: `slice[i]` borrows row `i` of the view.
    fn index(&self, idx: usize) -> &Self::Output {
        &self.rows[idx]
    }
}
impl IndexMut<usize> for Table {
    /// Panicking mutable positional access: `table[i] = row`.
    fn index_mut(&mut self, idx: usize) -> &mut Self::Output {
        &mut self.rows[idx]
    }
}
impl fmt::Display for Table {
    /// Delegate formatting to the borrowed `TableSlice` view of this table.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        self.as_ref().fmt(fmt)
    }
}
impl <'a> fmt::Display for TableSlice<'a> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
let mut writer = StringWriter::new();
if let Err(_) = self.print(&mut writer) {
return Err(fmt::Error)
}
return fmt.write_str(writer.as_string());
}
}
impl <B: ToString, A: IntoIterator<Item=B>> FromIterator<A> for Table {
fn from_iter<T>(iterator: T) -> Table where T: IntoIterator<Item=A> {
return Self::init(iterator.into_iter().map(|r| Row::from(r)).collect());
}
}
impl <T, A, B> From<T> for Table where B: ToString, A: IntoIterator<Item=B>, T : IntoIterator<Item=A> {
    /// Delegate to the `FromIterator` implementation.
    fn from(it: T) -> Table {
        Self::from_iter(it)
    }
}
/// Iterator over immutable cells in a column
pub struct ColumnIter<'a>(std::slice::Iter<'a, Row>, usize);
impl <'a> std::iter::Iterator for ColumnIter<'a> {
    type Item = &'a Cell;
    /// Yield the cell at the captured column index of each successive row.
    /// A row lacking that column yields `None`, ending iteration.
    fn next(&mut self) -> Option<&'a Cell> {
        self.0.next().and_then(|row| row.get_cell(self.1))
    }
}
/// Iterator over mutable cells in a column
pub struct ColumnIterMut<'a>(std::slice::IterMut<'a, Row>, usize);
impl <'a> std::iter::Iterator for ColumnIterMut<'a> {
    type Item = &'a mut Cell;
    /// Yield a mutable borrow of the cell at the captured column index of
    /// each successive row; a row lacking that column ends iteration.
    fn next(&mut self) -> Option<&'a mut Cell> {
        self.0.next().and_then(|row| row.get_mut_cell(self.1))
    }
}
impl <'a> AsRef<TableSlice<'a>> for TableSlice<'a> {
    /// A slice is already its own view; hand it back unchanged.
    fn as_ref(&self) -> &TableSlice<'a> {
        self
    }
}
impl <'a> AsRef<TableSlice<'a>> for Table {
    // NOTE(review): this impl looks unsound. It casts away constness and
    // mutates `rows` through a shared `&self` (undefined behavior if any
    // other reference observes it), and the final `transmute` assumes
    // `Table` and `TableSlice` have identical layouts — not guaranteed for
    // default-repr structs. Consider a safe constructor that borrows the
    // fields directly instead.
    fn as_ref(&self) -> &TableSlice<'a> {
        return unsafe {
            // All this is a bit hacky. Let's try to find something else
            let s = &mut *((self as *const Table) as *mut Table);
            s.rows.shrink_to_fit();
            return transmute(self);
        };
    }
}
/// Trait implemented by types which can be sliced
///
/// The blanket impl below covers anything viewable as a `TableSlice`,
/// sliced by any range type usable to index `[Row]`.
pub trait Slice<'a, E> {
    /// Type output after slicing
    type Output: 'a;
    /// Get a slice from self
    fn slice(&'a self, arg: E) -> Self::Output;
}
impl <'a, T, E> Slice<'a, E> for T where T: AsRef<TableSlice<'a>>, [Row]: Index<E, Output=[Row]> {
    type Output = TableSlice<'a>;
    /// Borrow a sub-range of rows, sharing the parent's format and titles.
    fn slice(&'a self, arg: E) -> Self::Output {
        let view = self.as_ref();
        TableSlice {
            format: view.format,
            titles: view.titles,
            rows: view.rows.index(arg)
        }
    }
}
/// Create a table filled with some values
///
/// All the arguments used for elements must implement the `std::string::ToString` trait
/// # Syntax
/// ```text
/// table!([Element1_ row1, Element2_ row1, ...], [Element1_row2, ...], ...);
/// ```
///
/// # Example
/// ```
/// # #[macro_use] extern crate prettytable;
/// # fn main() {
/// // Create a table initialized with some rows :
/// let tab = table!(["Element1", "Element2", "Element3"],
/// [1, 2, 3],
/// ["A", "B", "C"]
/// );
/// # drop(tab);
/// # }
/// ```
///
/// Some style can also be given in table creation
///
/// ```
/// # #[macro_use] extern crate prettytable;
/// # fn main() {
/// let tab = table!([FrByl->"Element1", Fgc->"Element2", "Element3"],
/// [FrBy => 1, 2, 3],
/// ["A", "B", "C"]
/// );
/// # drop(tab);
/// # }
/// ```
///
/// For details about style specifier syntax, check doc for [Cell::style_spec](cell/struct.Cell.html#method.style_spec) method
#[macro_export]
macro_rules! table {
    // Each bracketed, comma-separated group becomes one row (built via `row!`).
    ($([$($content:tt)*]), *) => (
        $crate::Table::init(vec![$(row![$($content)*]), *])
    );
}
/// Create a table with `table!` macro, print it to standard output, then return this table for future usage.
///
/// The syntax is the same that the one for the `table!` macro
#[macro_export]
macro_rules! ptable {
    ($($content:tt)*) => (
        {
            // Build with `table!`, print to stdout, then return the table.
            let tab = table!($($content)*);
            tab.printstd();
            tab
        }
    );
}
#[cfg(test)]
mod tests {
    use Table;
    use Slice;
    use row::Row;
    use cell::Cell;
    use format::consts::{FORMAT_NO_LINESEP, FORMAT_NO_COLSEP, FORMAT_CLEAN};
    // Default format: borders, a title separator, and a rule between rows.
    #[test]
    fn table() {
        let mut table = Table::new();
        table.add_row(Row::new(vec![Cell::new("a"), Cell::new("bc"), Cell::new("def")]));
        table.add_row(Row::new(vec![Cell::new("def"), Cell::new("bc"), Cell::new("a")]));
        table.set_titles(Row::new(vec![Cell::new("t1"), Cell::new("t2"), Cell::new("t3")]));
        let out = "\
+-----+----+-----+
| t1 | t2 | t3 |
+=====+====+=====+
| a | bc | def |
+-----+----+-----+
| def | bc | a |
+-----+----+-----+
";
        // Normalize Windows line endings before comparing.
        assert_eq!(table.to_string().replace("\r\n", "\n"), out);
    }
    // Index/IndexMut give direct cell access; column widths track new content.
    #[test]
    fn index() {
        let mut table = Table::new();
        table.add_row(Row::new(vec![Cell::new("a"), Cell::new("bc"), Cell::new("def")]));
        table.add_row(Row::new(vec![Cell::new("def"), Cell::new("bc"), Cell::new("a")]));
        table.set_titles(Row::new(vec![Cell::new("t1"), Cell::new("t2"), Cell::new("t3")]));
        assert_eq!(table[1][1].get_content(), "bc");
        table[1][1] = Cell::new("newval");
        assert_eq!(table[1][1].get_content(), "newval");
        let out = "\
+-----+--------+-----+
| t1 | t2 | t3 |
+=====+========+=====+
| a | bc | def |
+-----+--------+-----+
| def | newval | a |
+-----+--------+-----+
";
        assert_eq!(table.to_string().replace("\r\n", "\n"), out);
    }
    // FORMAT_NO_LINESEP drops all horizontal separator lines.
    #[test]
    fn no_linesep() {
        let mut table = Table::new();
        table.set_format(*FORMAT_NO_LINESEP);
        table.add_row(Row::new(vec![Cell::new("a"), Cell::new("bc"), Cell::new("def")]));
        table.add_row(Row::new(vec![Cell::new("def"), Cell::new("bc"), Cell::new("a")]));
        table.set_titles(Row::new(vec![Cell::new("t1"), Cell::new("t2"), Cell::new("t3")]));
        assert_eq!(table[1][1].get_content(), "bc");
        table[1][1] = Cell::new("newval");
        assert_eq!(table[1][1].get_content(), "newval");
        let out = "\
| t1 | t2 | t3 |
| a | bc | def |
| def | newval | a |
";
        assert_eq!(table.to_string().replace("\r\n", "\n"), out);
    }
    // FORMAT_NO_COLSEP drops column separators but keeps horizontal rules.
    #[test]
    fn no_colsep() {
        let mut table = Table::new();
        table.set_format(*FORMAT_NO_COLSEP);
        table.add_row(Row::new(vec![Cell::new("a"), Cell::new("bc"), Cell::new("def")]));
        table.add_row(Row::new(vec![Cell::new("def"), Cell::new("bc"), Cell::new("a")]));
        table.set_titles(Row::new(vec![Cell::new("t1"), Cell::new("t2"), Cell::new("t3")]));
        assert_eq!(table[1][1].get_content(), "bc");
        table[1][1] = Cell::new("newval");
        assert_eq!(table[1][1].get_content(), "newval");
        let out = "\
------------------
t1 t2 t3 \n\
==================
a bc def \n\
------------------
def newval a \n\
------------------
";
        println!("{}", out);
        println!("____");
        println!("{}", table.to_string().replace("\r\n", "\n"));
        assert_eq!(table.to_string().replace("\r\n", "\n"), out);
    }
    // FORMAT_CLEAN has no separators at all, only padded cell content.
    #[test]
    fn clean() {
        let mut table = Table::new();
        table.set_format(*FORMAT_CLEAN);
        table.add_row(Row::new(vec![Cell::new("a"), Cell::new("bc"), Cell::new("def")]));
        table.add_row(Row::new(vec![Cell::new("def"), Cell::new("bc"), Cell::new("a")]));
        table.set_titles(Row::new(vec![Cell::new("t1"), Cell::new("t2"), Cell::new("t3")]));
        assert_eq!(table[1][1].get_content(), "bc");
        table[1][1] = Cell::new("newval");
        assert_eq!(table[1][1].get_content(), "newval");
        let out = "\
\u{0020}t1 t2 t3 \n\
\u{0020}a bc def \n\
\u{0020}def newval a \n\
";
        println!("{}", out);
        println!("____");
        println!("{}", table.to_string().replace("\r\n", "\n"));
        assert_eq!(out, table.to_string().replace("\r\n", "\n"));
    }
    // Slicing composes: a slice of a slice shares the titles and format, and
    // chained sub-slices equal the equivalent single range.
    #[test]
    fn slices() {
        let mut table = Table::new();
        table.set_titles(Row::new(vec![Cell::new("t1"), Cell::new("t2"), Cell::new("t3")]));
        table.add_row(Row::new(vec![Cell::new("0"), Cell::new("0"), Cell::new("0")]));
        table.add_row(Row::new(vec![Cell::new("1"), Cell::new("1"), Cell::new("1")]));
        table.add_row(Row::new(vec![Cell::new("2"), Cell::new("2"), Cell::new("2")]));
        table.add_row(Row::new(vec![Cell::new("3"), Cell::new("3"), Cell::new("3")]));
        table.add_row(Row::new(vec![Cell::new("4"), Cell::new("4"), Cell::new("4")]));
        table.add_row(Row::new(vec![Cell::new("5"), Cell::new("5"), Cell::new("5")]));
        let out = "\
+----+----+----+
| t1 | t2 | t3 |
+====+====+====+
| 1 | 1 | 1 |
+----+----+----+
| 2 | 2 | 2 |
+----+----+----+
| 3 | 3 | 3 |
+----+----+----+
";
        let slice = table.slice(..);
        let slice = slice.slice(1..);
        let slice = slice.slice(..3);
        assert_eq!(out, slice.to_string().replace("\r\n", "\n"));
        assert_eq!(out, table.slice(1..4).to_string().replace("\r\n", "\n"));
    }
}
|
//! Calculate the crc64 checksum of the given data, starting with the given crc.
//!
//! Implements the CRC64 used by Redis, which is the variant with "Jones" coefficients and init value of 0.
//!
//! Specification of this CRC64 variant follows:
//!
//! ```text
//! Name: crc-64-jones
//! Width: 64 bits
//! Poly: 0xad93d23594c935a9
//! Reflected In: True
//! Xor_In: 0xffffffffffffffff
//! Reflected_Out: True
//! Xor_Out: 0x0
//! Check("123456789"): 0xe9c6d914c4b8d9ca
//! ```
//!
//! Example:
//!
//! ```rust
//! crc64::crc64(0, "123456789".as_bytes());
//! ```
use std::io;
use std::io::Write;
use std::mem;
use crc_table::CRC64_TAB;
mod crc_table;
/// Reverse the low `len` bits of `data` (bit 0 swaps with bit len-1, etc.).
/// Bits at or above `len` are discarded.
fn crc_reflect(data: u64, len: usize) -> u64 {
    let mut value = data;
    let mut reflected = value & 0x01;
    for _ in 1..len {
        value >>= 1;
        reflected = (reflected << 1) | (value & 0x01);
    }
    reflected
}
/// Bit-at-a-time CRC64 for the "Jones" polynomial, used only to bootstrap
/// the lookup tables. Input bits are consumed LSB-first and the final
/// register is reflected; the xor-out for this variant is 0.
fn crc64_trivial(crc: u64, in_data: &[u8]) -> u64 {
    const POLY: u64 = 0xad93d23594c935a9;
    let mut reg = crc;
    for &byte in in_data {
        // mask walks each bit of the byte from least to most significant;
        // it wraps to 0 after 0x80 << 1, ending the inner loop.
        let mut mask: u8 = 0x01;
        while mask != 0 {
            let mut feedback = reg & 0x8000000000000000 != 0;
            if byte & mask != 0 {
                feedback = !feedback;
            }
            reg <<= 1;
            if feedback {
                reg ^= POLY;
            }
            mask <<= 1;
        }
    }
    crc_reflect(reg, 64)
}
/// Build the 8 x 256 lookup tables used by the slice-by-8 `crc64` routine.
/// Row 0 is the classic byte-at-a-time table (seeded from `crc64_trivial`);
/// rows 1..8 let the main loop fold 8 input bytes per iteration.
pub fn crc64_init() -> Vec<Vec<u64>> {
    let mut crc : u64;
    let mut table : Vec<Vec<u64>> = Vec::with_capacity(8);
    for _ in 0..8 {
        table.push(Vec::with_capacity(256));
    };
    // First pass: row 0 gets the CRC of each possible single byte; rows 1..8
    // are filled with placeholder zeros, overwritten in the second pass.
    for n in 0..256 {
        table[0].push(crc64_trivial(0, &vec![n as u8]));
        table[1].push(0);
        table[2].push(0);
        table[3].push(0);
        table[4].push(0);
        table[5].push(0);
        table[6].push(0);
        table[7].push(0);
    }
    // Second pass: each row k is row 0 applied k extra times, i.e. the CRC
    // advanced by k additional zero bytes.
    for n in 0..256 {
        crc = table[0][n];
        for k in 1..8 {
            let idx = (crc as usize) & 0xff;
            crc = table[0][idx] ^ (crc >> 8);
            table[k][n] = crc;
        }
    };
    table
}
// Combine a slice of 8 bytes into one u64, least-significant byte first
// (little-endian, the layout the CRC tables are built for).
//
// The previous implementation transmuted the fat slice reference into a
// (pointer, length) pair and dereferenced the pointer as *const u64. That
// relied on an unspecified fat-pointer layout, performed a possibly
// unaligned read (undefined behavior), and silently depended on the
// target's byte order. The explicit fold below is safe and portable, and
// optimizes to a single load on little-endian targets.
macro_rules! slice_to_long {
    ($curVec:expr) => {
        {
            let bytes: &[u8] = &$curVec;
            debug_assert_eq!(bytes.len(), 8);
            bytes.iter().rev().fold(0u64, |acc, &b| (acc << 8) | b as u64)
        }
    }
}
/// Calculate the crc64 checksum of `data`, continuing from `crc`
/// (pass 0 to start a fresh checksum).
///
/// Uses the slice-by-8 technique: 8 input bytes are folded into the
/// register at once through the 8 precomputed tables, then any remaining
/// tail bytes are processed one at a time.
pub fn crc64(crc: u64, data: &[u8]) -> u64 {
    let mut crc = crc;
    let mut len = data.len();
    let mut offset = 0usize;
    while len >= 8 {
        // XOR the next 8 bytes into the register, then look up each register
        // byte in its own table and combine the results.
        crc ^= slice_to_long!(data[offset..(offset+8)]);
        crc = CRC64_TAB[7][(crc & 0xff) as usize] ^
        CRC64_TAB[6][((crc >> 8) & 0xff) as usize] ^
        CRC64_TAB[5][((crc >> 16) & 0xff) as usize] ^
        CRC64_TAB[4][((crc >> 24) & 0xff) as usize] ^
        CRC64_TAB[3][((crc >> 32) & 0xff) as usize] ^
        CRC64_TAB[2][((crc >> 40) & 0xff) as usize] ^
        CRC64_TAB[1][((crc >> 48) & 0xff) as usize] ^
        CRC64_TAB[0][(crc >> 56) as usize];
        offset += 8;
        len -= 8;
    }
    // Byte-at-a-time loop for the final len % 8 bytes.
    while len > 0 {
        crc = CRC64_TAB[0][((crc ^ data[offset] as u64) & 0xff) as usize] ^ (crc >> 8);
        offset += 1;
        len -= 1;
    }
    crc
}
/// Incremental CRC64 state: feed bytes through the `Write` impl and read
/// the running checksum with `get()`.
pub struct Crc64 {
    // Current checksum value; 0 is this variant's initial value.
    crc64: u64,
}
impl Crc64 {
    /// Create a checksum initialized to 0.
    pub fn new() -> Crc64 {
        Crc64 { crc64: 0 }
    }
    /// Return the checksum of all bytes written so far.
    pub fn get(&self) -> u64 {
        self.crc64
    }
}
impl Write for Crc64 {
    /// Fold `buf` into the running checksum; always consumes the whole buffer.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.crc64 = crc64(self.crc64, buf);
        Ok(buf.len())
    }
    /// Nothing is buffered, so flushing is a no-op.
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}
// Known check value for the crc-64-jones variant.
#[test]
fn test_crc64_works() {
    assert_eq!(0xe9c6d914c4b8d9ca, crc64(0, "123456789".as_bytes()))
}
// Feeding the data in two chunks — directly or through the Write impl —
// must produce the same intermediate and final values.
#[test]
fn test_crc64_write() {
    let step1 = "12345".as_bytes();
    let step2 = "6789".as_bytes();
    let value1 = 17326901458626182669;
    let value2 = 16845390139448941002;
    assert_eq!(value1, crc64(0, &step1));
    assert_eq!(value2, crc64(value1, &step2));
    let mut crc = Crc64::new();
    assert_eq!(crc.write(&step1).unwrap(), step1.len());
    assert_eq!(value1, crc.get());
    assert_eq!(crc.write(&step2).unwrap(), step2.len());
    assert_eq!(value2, crc.get());
}
docs: Proper module documentation
//! Calculate the crc64 checksum of the given data, starting with the given crc.
//!
//! Implements the CRC64 used by Redis, which is the variant with "Jones" coefficients and init value of 0.
//!
//! Specification of this CRC64 variant follows:
//!
//! ```text
//! Name: crc-64-jones
//! Width: 64 bits
//! Poly: 0xad93d23594c935a9
//! Reflected In: True
//! Xor_In: 0xffffffffffffffff
//! Reflected_Out: True
//! Xor_Out: 0x0
//! Check("123456789"): 0xe9c6d914c4b8d9ca
//! ```
//!
//! Example:
//!
//! ```rust
//! let cksum = crc64::crc64(0, "123456789".as_bytes());
//! assert_eq!(16845390139448941002, cksum);
//! ```
use std::io;
use std::io::Write;
use std::mem;
use crc_table::CRC64_TAB;
mod crc_table;
/// Reverse the low `len` bits of `data` (bit 0 swaps with bit len-1, etc.).
/// Bits at or above `len` are discarded.
fn crc_reflect(data: u64, len: usize) -> u64 {
    let mut value = data;
    let mut reflected = value & 0x01;
    for _ in 1..len {
        value >>= 1;
        reflected = (reflected << 1) | (value & 0x01);
    }
    reflected
}
/// Bit-at-a-time CRC64 for the "Jones" polynomial, used only to bootstrap
/// the lookup tables. Input bits are consumed LSB-first and the final
/// register is reflected; the xor-out for this variant is 0.
fn crc64_trivial(crc: u64, in_data: &[u8]) -> u64 {
    const POLY: u64 = 0xad93d23594c935a9;
    let mut reg = crc;
    for &byte in in_data {
        // mask walks each bit of the byte from least to most significant;
        // it wraps to 0 after 0x80 << 1, ending the inner loop.
        let mut mask: u8 = 0x01;
        while mask != 0 {
            let mut feedback = reg & 0x8000000000000000 != 0;
            if byte & mask != 0 {
                feedback = !feedback;
            }
            reg <<= 1;
            if feedback {
                reg ^= POLY;
            }
            mask <<= 1;
        }
    }
    crc_reflect(reg, 64)
}
/// Build the 8 x 256 lookup tables used by the slice-by-8 `crc64` routine.
/// Row 0 is the classic byte-at-a-time table (seeded from `crc64_trivial`);
/// rows 1..8 let the main loop fold 8 input bytes per iteration.
pub fn crc64_init() -> Vec<Vec<u64>> {
    let mut crc : u64;
    let mut table : Vec<Vec<u64>> = Vec::with_capacity(8);
    for _ in 0..8 {
        table.push(Vec::with_capacity(256));
    };
    // First pass: row 0 gets the CRC of each possible single byte; rows 1..8
    // are filled with placeholder zeros, overwritten in the second pass.
    for n in 0..256 {
        table[0].push(crc64_trivial(0, &vec![n as u8]));
        table[1].push(0);
        table[2].push(0);
        table[3].push(0);
        table[4].push(0);
        table[5].push(0);
        table[6].push(0);
        table[7].push(0);
    }
    // Second pass: each row k is row 0 applied k extra times, i.e. the CRC
    // advanced by k additional zero bytes.
    for n in 0..256 {
        crc = table[0][n];
        for k in 1..8 {
            let idx = (crc as usize) & 0xff;
            crc = table[0][idx] ^ (crc >> 8);
            table[k][n] = crc;
        }
    };
    table
}
// transmute slice of 8 u8 values to one u64 (drop the length)
// Combine a slice of 8 bytes into one u64, least-significant byte first
// (little-endian, the layout the CRC tables are built for).
//
// The previous implementation transmuted the fat slice reference into a
// (pointer, length) pair and dereferenced the pointer as *const u64. That
// relied on an unspecified fat-pointer layout, performed a possibly
// unaligned read (undefined behavior), and silently depended on the
// target's byte order. The explicit fold below is safe and portable, and
// optimizes to a single load on little-endian targets.
macro_rules! slice_to_long {
    ($curVec:expr) => {
        {
            let bytes: &[u8] = &$curVec;
            debug_assert_eq!(bytes.len(), 8);
            bytes.iter().rev().fold(0u64, |acc, &b| (acc << 8) | b as u64)
        }
    }
}
/// Calculate the crc64 checksum of `data`, continuing from `crc`
/// (pass 0 to start a fresh checksum).
///
/// Uses the slice-by-8 technique: 8 input bytes are folded into the
/// register at once through the 8 precomputed tables, then any remaining
/// tail bytes are processed one at a time.
pub fn crc64(crc: u64, data: &[u8]) -> u64 {
    let mut crc = crc;
    let mut len = data.len();
    let mut offset = 0usize;
    while len >= 8 {
        // XOR the next 8 bytes into the register, then look up each register
        // byte in its own table and combine the results.
        crc ^= slice_to_long!(data[offset..(offset+8)]);
        crc = CRC64_TAB[7][(crc & 0xff) as usize] ^
        CRC64_TAB[6][((crc >> 8) & 0xff) as usize] ^
        CRC64_TAB[5][((crc >> 16) & 0xff) as usize] ^
        CRC64_TAB[4][((crc >> 24) & 0xff) as usize] ^
        CRC64_TAB[3][((crc >> 32) & 0xff) as usize] ^
        CRC64_TAB[2][((crc >> 40) & 0xff) as usize] ^
        CRC64_TAB[1][((crc >> 48) & 0xff) as usize] ^
        CRC64_TAB[0][(crc >> 56) as usize];
        offset += 8;
        len -= 8;
    }
    // Byte-at-a-time loop for the final len % 8 bytes.
    while len > 0 {
        crc = CRC64_TAB[0][((crc ^ data[offset] as u64) & 0xff) as usize] ^ (crc >> 8);
        offset += 1;
        len -= 1;
    }
    crc
}
/// Incremental CRC64 state: feed bytes through the `Write` impl and read
/// the running checksum with `get()`.
pub struct Crc64 {
    // Current checksum value; 0 is this variant's initial value.
    crc64: u64,
}
impl Crc64 {
    /// Create a checksum initialized to 0.
    pub fn new() -> Crc64 {
        Crc64 { crc64: 0 }
    }
    /// Return the checksum of all bytes written so far.
    pub fn get(&self) -> u64 {
        self.crc64
    }
}
impl Write for Crc64 {
    /// Fold `buf` into the running checksum; always consumes the whole buffer.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.crc64 = crc64(self.crc64, buf);
        Ok(buf.len())
    }
    /// Nothing is buffered, so flushing is a no-op.
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}
// Known check value for the crc-64-jones variant.
#[test]
fn test_crc64_works() {
    assert_eq!(0xe9c6d914c4b8d9ca, crc64(0, "123456789".as_bytes()))
}
// Feeding the data in two chunks — directly or through the Write impl —
// must produce the same intermediate and final values.
#[test]
fn test_crc64_write() {
    let step1 = "12345".as_bytes();
    let step2 = "6789".as_bytes();
    let value1 = 17326901458626182669;
    let value2 = 16845390139448941002;
    assert_eq!(value1, crc64(0, &step1));
    assert_eq!(value2, crc64(value1, &step2));
    let mut crc = Crc64::new();
    assert_eq!(crc.write(&step1).unwrap(), step1.len());
    assert_eq!(value1, crc.get());
    assert_eq!(crc.write(&step2).unwrap(), step2.len());
    assert_eq!(value2, crc.get());
}
|
#![feature(asm)]
#![feature(no_std)]
#![no_std]
// Placeholder test so `cargo test` has something to run; real kernel code
// cannot easily execute under the host test harness.
#[test]
fn it_works() {
}
Add skeletal kernel source
#![crate_name = "sos_kernel"]
#![crate_type = "staticlib"]
#![feature(asm)]
#![feature(no_std, lang_items)]
#![no_std]
/// Kernel main loop
///
/// Entry point invoked by the boot code; `no_mangle` keeps the symbol name
/// visible to the linker. Currently a stub that returns immediately.
#[no_mangle]
pub extern fn kernel_main() {}
/// Required for Rust stack unwinding
///
/// Language item the compiler expects in a `no_std` build; an empty stub is
/// sufficient as long as nothing actually unwinds.
#[lang = "eh_personality"]
extern fn eh_personality() {
    // TODO: add support for stack unwinding
}
/// Called on `panic!`; with no OS to return to, the kernel spins forever.
#[lang = "panic_fmt"]
extern fn panic_fmt() -> ! {
    // TODO: actually format panics (waiting for robust VGA support)
    loop{}
}
|
extern crate crossbeam;
extern crate os_pipe;
use std::borrow::Borrow;
use std::collections::HashMap;
use std::ffi::{OsStr, OsString};
use std::fs::File;
use std::io;
use std::io::prelude::*;
#[cfg(unix)]
use std::os::unix::process::ExitStatusExt;
#[cfg(windows)]
use std::os::windows::process::ExitStatusExt;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio, Output, ExitStatus};
use std::thread::JoinHandle;
use std::sync::Arc;
// enums defined below
use ExpressionInner::*;
use IoExpressionInner::*;
/// Build an expression that runs `argv[0]` with the remaining elements as
/// its arguments. Each argument is copied into an owned `OsString` up front.
pub fn cmd<T: AsRef<OsStr>>(argv: &[T]) -> Expression {
    let mut argv_vec = Vec::with_capacity(argv.len());
    for arg in argv {
        argv_vec.push(arg.as_ref().to_os_string());
    }
    Expression::new(Cmd(argv_vec))
}
/// Macro form of `cmd()`: builds the argv from its arguments, each of which
/// must implement `AsRef<OsStr>`, so mixed types (str, String, PathBuf, ...)
/// can appear in one invocation.
#[macro_export]
macro_rules! cmd {
    ( $( $x:expr ),* ) => {
        {
            use std::ffi::OsStr;
            let mut temp_vec = Vec::new();
            $(
                // Coerce each argument through &OsStr, then copy it to owned.
                let temp_arg = $x;
                let temp_osstr: &OsStr = temp_arg.as_ref();
                temp_vec.push(temp_osstr.to_owned());
            )*
            $crate::cmd(&temp_vec)
        }
    };
}
/// Build an expression that runs `command` through the system shell
/// (see `shell_command_argv` for the per-platform shell invocation).
pub fn sh<T: AsRef<OsStr>>(command: T) -> Expression {
    Expression::new(Sh(command.as_ref().to_owned()))
}
/// A handle on an immutable command-expression tree. Cloning is cheap (the
/// tree is shared via `Arc`), and every builder method returns a new
/// expression instead of mutating this one.
#[derive(Clone, Debug)]
#[must_use]
pub struct Expression {
    // Shared root node of the expression tree.
    inner: Arc<ExpressionInner>,
}
impl Expression {
    /// Execute the expression and collect its exit status plus any captured
    /// stdout/stderr bytes. A non-zero exit status becomes `Error::Status`
    /// unless the expression is wrapped in `unchecked()`.
    pub fn run(&self) -> Result<Output, Error> {
        let (context, stdout_reader, stderr_reader) = try!(IoContext::new());
        let status = try!(self.inner.exec(context));
        // The reader threads finish once the last dup of each capture pipe's
        // write end has been closed by exec().
        let stdout_vec = try!(stdout_reader.join().unwrap());
        let stderr_vec = try!(stderr_reader.join().unwrap());
        let output = Output {
            status: status,
            stdout: stdout_vec,
            stderr: stderr_vec,
        };
        if !output.status.success() {
            Err(Error::Status(output))
        } else {
            Ok(output)
        }
    }
    /// Run with stdout captured and return it as UTF-8 text with trailing
    /// newlines stripped, like shell `$(...)` substitution.
    pub fn read(&self) -> Result<String, Error> {
        let output = try!(self.capture_stdout().run());
        let output_str = try!(std::str::from_utf8(&output.stdout));
        Ok(trim_right_newlines(output_str).to_owned())
    }
    /// Pipe this expression's stdout into `right` (`left | right`).
    pub fn pipe<T: Borrow<Expression>>(&self, right: T) -> Expression {
        Self::new(Pipe(self.clone(), right.borrow().clone()))
    }
    /// Run this expression, then `right` only if it succeeded (`left && right`).
    pub fn then<T: Borrow<Expression>>(&self, right: T) -> Expression {
        Self::new(Then(self.clone(), right.borrow().clone()))
    }
    /// Feed the given bytes to stdin from a background writer thread.
    pub fn input<T: AsRef<[u8]>>(&self, input: T) -> Self {
        Self::new(Io(Input(input.as_ref().to_vec()), self.clone()))
    }
    /// Redirect stdin from a path or an already-open file.
    pub fn stdin<T: Into<FileOpener>>(&self, stdin: T) -> Self {
        Self::new(Io(Stdin(stdin.into()), self.clone()))
    }
    /// Redirect stdin from the null device.
    pub fn null_stdin(&self) -> Self {
        Self::new(Io(StdinNull, self.clone()))
    }
    /// Redirect stdout to a path or an already-open file.
    pub fn stdout<T: Into<FileOpener>>(&self, stdout: T) -> Self {
        Self::new(Io(Stdout(stdout.into()), self.clone()))
    }
    /// Discard stdout.
    pub fn null_stdout(&self) -> Self {
        Self::new(Io(StdoutNull, self.clone()))
    }
    /// Capture stdout into the `Output` returned by `run()`.
    pub fn capture_stdout(&self) -> Self {
        Self::new(Io(StdoutCapture, self.clone()))
    }
    /// Send stdout wherever stderr currently points (like `1>&2`).
    pub fn stdout_to_stderr(&self) -> Self {
        Self::new(Io(StdoutToStderr, self.clone()))
    }
    /// Redirect stderr to a path or an already-open file.
    pub fn stderr<T: Into<FileOpener>>(&self, stderr: T) -> Self {
        Self::new(Io(Stderr(stderr.into()), self.clone()))
    }
    /// Discard stderr.
    pub fn null_stderr(&self) -> Self {
        Self::new(Io(StderrNull, self.clone()))
    }
    /// Capture stderr into the `Output` returned by `run()`.
    pub fn capture_stderr(&self) -> Self {
        Self::new(Io(StderrCapture, self.clone()))
    }
    /// Send stderr wherever stdout currently points (like `2>&1`).
    pub fn stderr_to_stdout(&self) -> Self {
        Self::new(Io(StderrToStdout, self.clone()))
    }
    /// Set the child's working directory.
    pub fn dir<T: AsRef<Path>>(&self, path: T) -> Self {
        Self::new(Io(Dir(path.as_ref().to_owned()), self.clone()))
    }
    /// Set a single environment variable for the child.
    pub fn env<T: AsRef<OsStr>, U: AsRef<OsStr>>(&self, name: T, val: U) -> Self {
        Self::new(Io(Env(name.as_ref().to_owned(), val.as_ref().to_owned()),
                     self.clone()))
    }
    /// Replace the child's entire environment with `name_vals`.
    pub fn full_env<T, U, V>(&self, name_vals: T) -> Self
        where T: IntoIterator<Item = (U, V)>,
              U: AsRef<OsStr>,
              V: AsRef<OsStr>
    {
        let env_map = name_vals.into_iter()
            .map(|(k, v)| (k.as_ref().to_owned(), v.as_ref().to_owned()))
            .collect();
        Self::new(Io(FullEnv(env_map), self.clone()))
    }
    /// Treat any exit status as success (suppresses `Error::Status`).
    pub fn unchecked(&self) -> Self {
        Self::new(Io(Unchecked, self.clone()))
    }
    // Wrap a tree node in the shared-Arc handle.
    fn new(inner: ExpressionInner) -> Self {
        Expression { inner: Arc::new(inner) }
    }
}
/// Node types of the expression tree; `Expression` is an `Arc` around one.
#[derive(Debug)]
enum ExpressionInner {
    /// Run an argv directly (no shell).
    Cmd(Vec<OsString>),
    /// Run a command line through the platform shell.
    Sh(OsString),
    /// `left | right`
    Pipe(Expression, Expression),
    /// `left && right`
    Then(Expression, Expression),
    /// An I/O or environment modifier applied to an inner expression.
    Io(IoExpressionInner, Expression),
}
impl ExpressionInner {
    /// Execute this node under `context` (consumed — its handles move into
    /// the children), dispatching to the matching exec_* helper.
    fn exec(&self, context: IoContext) -> io::Result<ExitStatus> {
        match *self {
            Cmd(ref argv) => exec_argv(argv, context),
            Sh(ref command) => exec_sh(command, context),
            Pipe(ref left, ref right) => exec_pipe(left, right, context),
            Then(ref left, ref right) => exec_then(left, right, context),
            Io(ref io_inner, ref expr) => exec_io(io_inner, expr, context),
        }
    }
}
/// Spawn `argv[0]` with the remaining arguments, wiring stdio, working
/// directory and environment from `context`, and wait for it to exit.
/// NOTE(review): indexes `argv[0]` directly, so an empty argv panics —
/// callers appear to always pass at least one element; confirm.
fn exec_argv<T: AsRef<OsStr>>(argv: &[T], context: IoContext) -> io::Result<ExitStatus> {
    let mut command = Command::new(&argv[0]);
    command.args(&argv[1..]);
    // TODO: Avoid unnecessary dup'ing here.
    command.stdin(try!(context.stdin.into_stdio()));
    command.stdout(try!(context.stdout.into_stdio()));
    command.stderr(try!(context.stderr.into_stdio()));
    command.current_dir(context.dir);
    // Replace the inherited environment wholesale with the context's map.
    command.env_clear();
    for (name, val) in context.env {
        command.env(name, val);
    }
    Ok(try!(command.status()))
}
/// Build the argv used to run `command` through the system shell.
/// On Unix this is `/bin/sh -c <command>`.
#[cfg(unix)]
fn shell_command_argv(command: OsString) -> [OsString; 3] {
    [OsString::from("/bin/sh"), OsString::from("-c"), command]
}
/// Build the argv used to run `command` through the system shell.
/// On Windows this is `%COMSPEC% /C <command>`, falling back to `cmd.exe`.
#[cfg(windows)]
fn shell_command_argv(command: OsString) -> [OsString; 3] {
    // unwrap_or_else avoids allocating the fallback string when COMSPEC is set
    // (unwrap_or would build it eagerly on every call).
    let comspec = std::env::var_os("COMSPEC").unwrap_or_else(|| OsString::from("cmd.exe"));
    [comspec, OsString::from("/C"), command]
}
/// Run `command` through the platform shell by delegating to `exec_argv`.
fn exec_sh<T: AsRef<OsStr>>(command: T, context: IoContext) -> io::Result<ExitStatus> {
    exec_argv(&shell_command_argv(command.as_ref().to_owned()), context)
}
/// Run `left` and `right` concurrently with left's stdout piped into
/// right's stdin. Right's status wins when it is a failure; otherwise
/// left's status is reported.
fn exec_pipe(left: &Expression, right: &Expression, context: IoContext) -> io::Result<ExitStatus> {
    let pair = try!(os_pipe::pipe());
    let mut left_context = try!(context.try_clone()); // dup'ing stdin/stdout isn't strictly necessary, but no big deal
    left_context.stdout = IoValue::File(pair.write);
    let mut right_context = context;
    right_context.stdin = IoValue::File(pair.read);
    // A scoped thread runs the left side so both children execute in
    // parallel; the scope joins the thread before returning.
    let (left_result, right_result) = crossbeam::scope(|scope| {
        let left_joiner = scope.spawn(|| left.inner.exec(left_context));
        let right_result = right.inner.exec(right_context);
        let left_result = left_joiner.join();
        (left_result, right_result)
    });
    let right_status = try!(right_result);
    let left_status = try!(left_result);
    if !right_status.success() {
        Ok(right_status)
    } else {
        Ok(left_status)
    }
}
/// Run `left`; if it succeeded, run `right` with the original context.
/// A failing `left` short-circuits and its status is returned unchanged.
fn exec_then(left: &Expression, right: &Expression, context: IoContext) -> io::Result<ExitStatus> {
    let left_status = try!(left.inner.exec(try!(context.try_clone())));
    if left_status.success() {
        right.inner.exec(context)
    } else {
        Ok(left_status)
    }
}
/// Apply a single I/O modifier to the context, run the inner expression,
/// then join the optional stdin writer thread. `Unchecked` is special-cased
/// here to rewrite a failing status as success.
fn exec_io(io_inner: &IoExpressionInner,
           expr: &Expression,
           context: IoContext)
           -> io::Result<ExitStatus> {
    {
        crossbeam::scope(|scope| {
            let (new_context, maybe_writer_thread) = try!(io_inner.update_context(context, scope));
            let exec_result = expr.inner.exec(new_context);
            let writer_result = join_maybe_writer_thread(maybe_writer_thread);
            // Propagate any exec errors first.
            let exec_status = try!(exec_result);
            // Then propagate any writer thread errors.
            try!(writer_result);
            // Finally, implement unchecked() status suppression here.
            if let &Unchecked = io_inner {
                Ok(ExitStatus::from_raw(0))
            } else {
                Ok(exec_status)
            }
        })
    }
}
/// One I/O or environment modifier, as wrapped around an inner expression
/// by the builder methods on `Expression`.
#[derive(Debug)]
enum IoExpressionInner {
    /// Feed these bytes to stdin through a writer thread.
    Input(Vec<u8>),
    /// Redirect stdin from a file.
    Stdin(FileOpener),
    /// Redirect stdin from the null device.
    StdinNull,
    /// Redirect stdout to a file.
    Stdout(FileOpener),
    /// Discard stdout.
    StdoutNull,
    /// Capture stdout into the run() output.
    StdoutCapture,
    /// Send stdout wherever stderr currently points.
    StdoutToStderr,
    /// Redirect stderr to a file.
    Stderr(FileOpener),
    /// Discard stderr.
    StderrNull,
    /// Capture stderr into the run() output.
    StderrCapture,
    /// Send stderr wherever stdout currently points.
    StderrToStdout,
    /// Set the child's working directory.
    Dir(PathBuf),
    /// Set a single environment variable.
    Env(OsString, OsString),
    /// Replace the entire environment.
    FullEnv(HashMap<OsString, OsString>),
    /// Suppress a non-zero exit status.
    Unchecked,
}
impl IoExpressionInner {
    /// Apply this modifier to `context`, returning the updated context plus
    /// an optional writer thread (only for `Input`, whose feeder thread is
    /// spawned on `scope` so it may borrow the input bytes).
    fn update_context<'a>(&'a self,
                          mut context: IoContext,
                          scope: &crossbeam::Scope<'a>)
                          -> io::Result<(IoContext, Option<WriterThread>)> {
        let mut maybe_thread = None;
        match *self {
            Input(ref v) => {
                // Stdin becomes the read end of a pipe; the thread writes `v`
                // into the write end.
                let (reader, thread) = try!(pipe_with_writer_thread(v, scope));
                context.stdin = IoValue::File(reader);
                maybe_thread = Some(thread)
            }
            Stdin(ref f) => {
                context.stdin = IoValue::File(try!(f.open_for_reading()));
            }
            StdinNull => {
                context.stdin = IoValue::Null;
            }
            Stdout(ref f) => {
                context.stdout = IoValue::File(try!(f.open_for_writing()));
            }
            StdoutNull => {
                context.stdout = IoValue::Null;
            }
            StdoutCapture => {
                // Dup the capture pipe's write end so run() can collect it.
                context.stdout = IoValue::File(try!(context.stdout_capture.try_clone()))
            }
            StdoutToStderr => {
                context.stdout = try!(context.stderr.try_clone());
            }
            Stderr(ref f) => {
                context.stderr = IoValue::File(try!(f.open_for_writing()));
            }
            StderrNull => {
                context.stderr = IoValue::Null;
            }
            StderrCapture => {
                context.stderr = IoValue::File(try!(context.stderr_capture.try_clone()))
            }
            StderrToStdout => {
                context.stderr = try!(context.stdout.try_clone());
            }
            Dir(ref p) => {
                context.dir = p.clone();
            }
            Env(ref name, ref val) => {
                context.env.insert(name.clone(), val.clone());
            }
            FullEnv(ref map) => {
                context.env = map.clone();
            }
            Unchecked => {
                // No-op. Unchecked is handled in exec_io().
            }
        }
        Ok((context, maybe_thread))
    }
}
/// A stdio redirect target opened lazily at exec time: either a path to
/// open/create, or an already-open file to duplicate.
#[derive(Debug)]
pub enum FileOpener {
    PathBuf(PathBuf),
    File(File),
}
impl FileOpener {
    /// Produce a readable `File`: open the path, or dup the existing handle.
    fn open_for_reading(&self) -> io::Result<File> {
        match *self {
            FileOpener::PathBuf(ref p) => File::open(p),
            FileOpener::File(ref f) => f.try_clone(),
        }
    }
    /// Produce a writable `File`: create/truncate the path, or dup the
    /// existing handle.
    fn open_for_writing(&self) -> io::Result<File> {
        match *self {
            FileOpener::PathBuf(ref p) => File::create(p),
            FileOpener::File(ref f) => f.try_clone(),
        }
    }
}
impl From<File> for FileOpener {
    fn from(f: File) -> FileOpener {
        FileOpener::File(f)
    }
}
// TODO: Get rid of most of these impl's once specialization lands.
// All of the string-like and path-like types below convert into the PathBuf
// variant, so any of them can be passed to stdin()/stdout()/stderr().
impl<'a> From<&'a str> for FileOpener {
    fn from(s: &str) -> FileOpener {
        FileOpener::PathBuf(AsRef::<Path>::as_ref(s).to_owned())
    }
}
impl<'a> From<&'a String> for FileOpener {
    fn from(s: &String) -> FileOpener {
        FileOpener::PathBuf(s.clone().into())
    }
}
impl From<String> for FileOpener {
    fn from(s: String) -> FileOpener {
        FileOpener::PathBuf(s.into())
    }
}
impl<'a> From<&'a Path> for FileOpener {
    fn from(p: &Path) -> FileOpener {
        FileOpener::PathBuf(p.to_owned())
    }
}
impl<'a> From<&'a PathBuf> for FileOpener {
    fn from(p: &PathBuf) -> FileOpener {
        FileOpener::PathBuf(p.clone())
    }
}
impl From<PathBuf> for FileOpener {
    fn from(p: PathBuf) -> FileOpener {
        FileOpener::PathBuf(p)
    }
}
impl<'a> From<&'a OsStr> for FileOpener {
    /// Treat a borrowed `OsStr` as a path, copying it into an owned `PathBuf`.
    fn from(s: &OsStr) -> FileOpener {
        // The previous `s.clone()` only cloned the *reference* (a no-op);
        // convert through Path explicitly, mirroring the `&str` impl above.
        FileOpener::PathBuf(AsRef::<Path>::as_ref(s).to_owned())
    }
}
// Owned and borrowed OsStrings are likewise treated as paths.
impl<'a> From<&'a OsString> for FileOpener {
    fn from(s: &OsString) -> FileOpener {
        FileOpener::PathBuf(AsRef::<Path>::as_ref(s).to_owned())
    }
}
impl From<OsString> for FileOpener {
    fn from(s: OsString) -> FileOpener {
        FileOpener::PathBuf(s.into())
    }
}
/// Everything that can go wrong when running an expression: an OS-level I/O
/// failure, invalid UTF-8 from `read()`, or a child that exited non-zero.
#[derive(Debug)]
pub enum Error {
    Io(io::Error),
    Utf8(std::str::Utf8Error),
    Status(Output),
}
// From impls let `try!` convert the underlying errors automatically.
impl From<io::Error> for Error {
    fn from(err: io::Error) -> Error {
        Error::Io(err)
    }
}
impl From<std::str::Utf8Error> for Error {
    fn from(err: std::str::Utf8Error) -> Error {
        Error::Utf8(err)
    }
}
// An IoContext represents the file descriptors child processes are talking to at execution time.
// It's initialized in run(), with dups of the stdin/stdout/stderr pipes, and then passed down to
// sub-expressions. Compound expressions will clone() it, and redirections will modify it.
#[derive(Debug)]
pub struct IoContext {
    // Where each of the child's three standard streams points.
    stdin: IoValue,
    stdout: IoValue,
    stderr: IoValue,
    // Write ends of the capture pipes created in new(); capture_stdout()/
    // capture_stderr() dup these into the child.
    stdout_capture: File,
    stderr_capture: File,
    // Working directory and full environment for spawned commands.
    dir: PathBuf,
    env: HashMap<OsString, OsString>,
}
impl IoContext {
    // Returns (context, stdout_reader, stderr_reader).
    /// Build the root context for a run(): parent stdio, fresh capture pipes
    /// (each with a reader thread draining it), the current directory, and a
    /// snapshot of this process's environment.
    fn new() -> io::Result<(IoContext, ReaderThread, ReaderThread)> {
        let (stdout_capture, stdout_reader) = try!(pipe_with_reader_thread());
        let (stderr_capture, stderr_reader) = try!(pipe_with_reader_thread());
        let mut env = HashMap::new();
        for (name, val) in std::env::vars_os() {
            env.insert(name, val);
        }
        let context = IoContext {
            stdin: IoValue::ParentStdin,
            stdout: IoValue::ParentStdout,
            stderr: IoValue::ParentStderr,
            stdout_capture: stdout_capture,
            stderr_capture: stderr_capture,
            dir: try!(std::env::current_dir()),
            env: env,
        };
        Ok((context, stdout_reader, stderr_reader))
    }
    /// Deep-duplicate the context: file handles are dup'd, dir/env cloned.
    fn try_clone(&self) -> io::Result<IoContext> {
        Ok(IoContext {
            stdin: try!(self.stdin.try_clone()),
            stdout: try!(self.stdout.try_clone()),
            stderr: try!(self.stderr.try_clone()),
            stdout_capture: try!(self.stdout_capture.try_clone()),
            stderr_capture: try!(self.stderr_capture.try_clone()),
            dir: self.dir.clone(),
            env: self.env.clone(),
        })
    }
}
/// Where one of the three stdio streams points: inherited from the parent,
/// the null device, or a concrete file/pipe handle.
#[derive(Debug)]
enum IoValue {
    ParentStdin,
    ParentStdout,
    ParentStderr,
    Null,
    File(File),
}
impl IoValue {
    /// Duplicate this handle. Only the `File` variant owns a resource; for
    /// it the underlying descriptor is dup'd, which is the only step that
    /// can fail.
    fn try_clone(&self) -> io::Result<IoValue> {
        match *self {
            IoValue::ParentStdin => Ok(IoValue::ParentStdin),
            IoValue::ParentStdout => Ok(IoValue::ParentStdout),
            IoValue::ParentStderr => Ok(IoValue::ParentStderr),
            IoValue::Null => Ok(IoValue::Null),
            IoValue::File(ref f) => Ok(IoValue::File(try!(f.try_clone()))),
        }
    }
    /// Convert into a `Stdio` suitable for `std::process::Command`.
    fn into_stdio(self) -> io::Result<Stdio> {
        match self {
            IoValue::ParentStdin => os_pipe::parent_stdin(),
            IoValue::ParentStdout => os_pipe::parent_stdout(),
            IoValue::ParentStderr => os_pipe::parent_stderr(),
            IoValue::Null => Ok(Stdio::null()),
            IoValue::File(f) => Ok(os_pipe::stdio_from_file(f)),
        }
    }
}
/// Joinable thread that yields everything read from a capture pipe.
type ReaderThread = JoinHandle<io::Result<Vec<u8>>>;
/// Create a pipe and spawn a thread that drains its read end to EOF.
/// The write end is returned for children to inherit; the reader finishes
/// once every dup of the write end has been closed.
fn pipe_with_reader_thread() -> io::Result<(File, ReaderThread)> {
    let os_pipe::Pair { mut read, write } = try!(os_pipe::pipe());
    let thread = std::thread::spawn(move || {
        let mut output = Vec::new();
        try!(read.read_to_end(&mut output));
        Ok(output)
    });
    Ok((write, thread))
}
/// Scoped thread that feeds bytes into a pipe; joined by exec_io().
type WriterThread = crossbeam::ScopedJoinHandle<io::Result<()>>;
/// Create a pipe and spawn a scoped thread that writes `input` into it.
/// The read end is returned for the child's stdin; the scoped lifetime lets
/// the thread borrow `input` without copying it.
fn pipe_with_writer_thread<'a>(input: &'a [u8],
                               scope: &crossbeam::Scope<'a>)
                               -> io::Result<(File, WriterThread)> {
    let os_pipe::Pair { read, mut write } = try!(os_pipe::pipe());
    let thread = scope.spawn(move || {
        try!(write.write_all(&input));
        Ok(())
    });
    Ok((read, thread))
}
/// Join the optional stdin writer thread and propagate its errors — except
/// BrokenPipe, which just means the child exited before reading all input
/// and is deliberately ignored.
fn join_maybe_writer_thread(maybe_writer_thread: Option<WriterThread>) -> io::Result<()> {
    let thread = match maybe_writer_thread {
        Some(thread) => thread,
        None => return Ok(()),
    };
    match thread.join() {
        Err(err) => {
            if err.kind() == io::ErrorKind::BrokenPipe {
                Ok(())
            } else {
                Err(err)
            }
        }
        Ok(()) => Ok(()),
    }
}
/// Strips any run of trailing '\n' / '\r' characters from `s`, leaving all
/// other trailing whitespace intact.
fn trim_right_newlines(s: &str) -> &str {
    s.trim_right_matches(|c: char| "\r\n".contains(c))
}
#[cfg(test)]
mod test {
extern crate tempdir;
use self::tempdir::TempDir;
use super::*;
use std::collections::HashMap;
use std::env;
use std::ffi::{OsStr, OsString};
use std::fs::File;
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::str;
#[cfg(unix)]
fn exe_filename(name: &str) -> String {
name.to_owned()
}
#[cfg(windows)]
fn exe_filename(name: &str) -> String {
format!("{}.exe", name)
}
fn path_to_test_binary(name: &str) -> PathBuf {
let test_project = Path::new(".").join("test").join(name);
// Build the test command.
sh("cargo build --quiet")
.dir(&test_project)
.run()
.expect(&format!("building test command '{}' returned an error", name));
// Return the path to the built binary.
test_project.join("target")
.join("debug")
.join(exe_filename(name))
.canonicalize()
.unwrap()
}
fn true_cmd() -> Expression {
cmd!(path_to_test_binary("status"), "0")
}
fn false_cmd() -> Expression {
cmd!(path_to_test_binary("status"), "1")
}
#[test]
fn test_cmd() {
// Windows compatible.
let output = cmd!(path_to_test_binary("echo"), "hi").read().unwrap();
assert_eq!("hi", output);
}
#[test]
fn test_sh() {
// Windows compatible.
let output = sh("echo hi").read().unwrap();
assert_eq!("hi", output);
}
#[test]
fn test_error() {
let result = false_cmd().run();
if let Err(Error::Status(output)) = result {
// Check that the status is non-zero.
assert!(!output.status.success());
} else {
panic!("Expected a status error.");
}
}
#[test]
fn test_unchecked() {
let unchecked_false = false_cmd().unchecked();
let output = unchecked_false.then(cmd!(path_to_test_binary("echo"), "waa"))
.then(unchecked_false)
.read()
.unwrap();
assert_eq!("waa", output);
}
#[test]
fn test_pipe() {
let output = sh("echo xxx").pipe(cmd!(path_to_test_binary("x_to_y"))).read().unwrap();
assert_eq!("yyy", output);
// Check that errors on either side are propagated.
let result = true_cmd().pipe(false_cmd()).run();
match result {
Err(Error::Status(output)) => {
assert!(output.status.code().unwrap() == 1);
}
_ => panic!("should never get here"),
}
let result = false_cmd().pipe(true_cmd()).run();
match result {
Err(Error::Status(output)) => {
assert!(output.status.code().unwrap() == 1);
}
_ => panic!("should never get here"),
}
}
#[test]
fn test_then() {
let output = true_cmd().then(sh("echo lo")).read().unwrap();
assert_eq!("lo", output);
// Check that errors on either side are propagated.
let result = true_cmd().then(false_cmd()).run();
match result {
Err(Error::Status(output)) => {
assert!(output.status.code().unwrap() == 1);
}
_ => panic!("should never get here"),
}
let result = false_cmd().then(true_cmd()).run();
match result {
Err(Error::Status(output)) => {
assert!(output.status.code().unwrap() == 1);
}
_ => panic!("should never get here"),
}
}
#[test]
fn test_input() {
// TODO: Fixed-length bytes input like b"foo" works poorly here. Why?
let expr = cmd!(path_to_test_binary("x_to_y")).input("xxx");
let output = expr.read().unwrap();
assert_eq!("yyy", output);
}
#[test]
fn test_null() {
let expr = cmd!(path_to_test_binary("cat"))
.null_stdin()
.null_stdout()
.null_stderr();
let output = expr.read().unwrap();
assert_eq!("", output);
}
#[test]
fn test_path() {
let dir = TempDir::new("test_path").unwrap();
let input_file = dir.path().join("input_file");
let output_file = dir.path().join("output_file");
File::create(&input_file).unwrap().write_all(b"xxx").unwrap();
let expr = cmd!(path_to_test_binary("x_to_y"))
.stdin(&input_file)
.stdout(&output_file);
let output = expr.read().unwrap();
assert_eq!("", output);
let mut file_output = String::new();
File::open(&output_file).unwrap().read_to_string(&mut file_output).unwrap();
assert_eq!("yyy", file_output);
}
#[test]
fn test_stderr_to_stdout() {
// Windows compatible. (Requires no space before the ">".)
let command = sh("echo hi>&2").stderr_to_stdout();
let output = command.read().unwrap();
assert_eq!("hi", output);
}
#[test]
fn test_file() {
let dir = TempDir::new("test_file").unwrap();
let file = dir.path().join("file");
File::create(&file).unwrap().write_all(b"example").unwrap();
let expr = cmd!(path_to_test_binary("cat")).stdin(File::open(&file).unwrap());
let output = expr.read().unwrap();
assert_eq!(output, "example");
}
#[test]
fn test_ergonomics() {
let mystr = "owned string".to_owned();
let mypathbuf = Path::new("a/b/c").to_owned();
let myvec = vec![1, 2, 3];
// These are nonsense expressions. We just want to make sure they compile.
let _ = sh("true").stdin(&*mystr).input(&*myvec).stdout(&*mypathbuf);
let _ = sh("true").stdin(&mystr).input(&myvec).stdout(&mypathbuf);
let _ = sh("true").stdin(mystr).input(myvec).stdout(mypathbuf);
}
#[test]
fn test_capture_both() {
// Windows compatible, no space before ">", and we trim newlines at the end to avoid
// dealing with the different kinds.
let output = sh("echo hi")
.then(sh("echo lo>&2"))
.capture_stdout()
.capture_stderr()
.run()
.unwrap();
assert_eq!("hi", str::from_utf8(&output.stdout).unwrap().trim());
assert_eq!("lo", str::from_utf8(&output.stderr).unwrap().trim());
}
#[test]
fn test_dir() {
let pwd = cmd!(path_to_test_binary("pwd"));
// First assert that ordinary commands happen in the parent's dir.
let pwd_output = pwd.read().unwrap();
let pwd_path = Path::new(&pwd_output);
assert_eq!(pwd_path, env::current_dir().unwrap());
// Now create a temp dir and make sure we can set dir to it.
let dir = TempDir::new("duct_test").unwrap();
let pwd_output = pwd.dir(dir.path()).read().unwrap();
let pwd_path = Path::new(&pwd_output);
assert_eq!(pwd_path, dir.path());
}
#[test]
fn test_env() {
let output = cmd!(path_to_test_binary("print_env"), "foo")
.env("foo", "bar")
.read()
.unwrap();
assert_eq!("bar", output);
}
#[test]
fn test_full_env() {
let var_name = "test_env_remove_var";
// Capture the parent env, and make sure it does *not* contain our variable.
let mut clean_env: HashMap<OsString, OsString> = env::vars_os().collect();
clean_env.remove(AsRef::<OsStr>::as_ref(var_name));
// Run a child process with that map passed to full_env(). It should be guaranteed not to
// see our variable, regardless of any outer env() calls or changes in the parent.
let clean_child = cmd!(path_to_test_binary("print_env"), var_name).full_env(clean_env);
// Dirty the parent env. Should be suppressed.
env::set_var(var_name, "junk1");
// And make an outer env() call. Should also be suppressed.
let dirty_child = clean_child.env(var_name, "junk2");
// Check that neither of those have any effect.
let output = dirty_child.read().unwrap();
assert_eq!("", output);
}
#[test]
fn test_broken_pipe() {
// If the input writing thread fills up its pipe buffer, writing will block. If the process
// on the other end of the pipe exits while writer is waiting, the write will return an
// error. We need to swallow that error, rather than returning it.
let myvec = vec![0; 1_000_000];
true_cmd().input(myvec).run().unwrap();
}
}
// commit: better coverage for swapping
extern crate crossbeam;
extern crate os_pipe;
use std::borrow::Borrow;
use std::collections::HashMap;
use std::ffi::{OsStr, OsString};
use std::fs::File;
use std::io;
use std::io::prelude::*;
#[cfg(unix)]
use std::os::unix::process::ExitStatusExt;
#[cfg(windows)]
use std::os::windows::process::ExitStatusExt;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio, Output, ExitStatus};
use std::thread::JoinHandle;
use std::sync::Arc;
// enums defined below
use ExpressionInner::*;
use IoExpressionInner::*;
/// Builds a command expression from an argv slice; the first element is the
/// program and the remaining elements are its arguments.
pub fn cmd<T: AsRef<OsStr>>(argv: &[T]) -> Expression {
    let mut argv_vec = Vec::with_capacity(argv.len());
    for arg in argv {
        argv_vec.push(arg.as_ref().to_owned());
    }
    Expression::new(Cmd(argv_vec))
}
// Variadic convenience wrapper around `cmd()`. Each argument only needs to
// implement AsRef<OsStr>; the macro collects owned OsStrings and forwards
// them as a slice. `temp_arg` keeps each argument alive long enough for the
// borrowed &OsStr to be copied out.
#[macro_export]
macro_rules! cmd {
    ( $( $x:expr ),* ) => {
        {
            use std::ffi::OsStr;
            let mut temp_vec = Vec::new();
            $(
                let temp_arg = $x;
                let temp_osstr: &OsStr = temp_arg.as_ref();
                temp_vec.push(temp_osstr.to_owned());
            )*
            $crate::cmd(&temp_vec)
        }
    };
}
// Builds an expression that runs `command` through the platform shell
// (/bin/sh -c on Unix, %COMSPEC% /C on Windows; see shell_command_argv).
pub fn sh<T: AsRef<OsStr>>(command: T) -> Expression {
    Expression::new(Sh(command.as_ref().to_owned()))
}
// An immutable description of a command tree. Cloning is cheap (Arc bump),
// so builder methods return new Expressions instead of mutating. #[must_use]
// because building an expression has no effect until run()/read().
#[derive(Clone, Debug)]
#[must_use]
pub struct Expression {
    inner: Arc<ExpressionInner>,
}
impl Expression {
    /// Executes the expression and waits for it to finish. Capture reader
    /// threads are joined after the tree completes, and a non-success exit
    /// status becomes Error::Status unless suppressed with unchecked().
    pub fn run(&self) -> Result<Output, Error> {
        let (context, stdout_reader, stderr_reader) = try!(IoContext::new());
        let status = try!(self.inner.exec(context));
        // The outer unwrap propagates reader-thread panics; the inner try!
        // propagates the thread's io::Result.
        let stdout_vec = try!(stdout_reader.join().unwrap());
        let stderr_vec = try!(stderr_reader.join().unwrap());
        let output = Output {
            status: status,
            stdout: stdout_vec,
            stderr: stderr_vec,
        };
        if !output.status.success() {
            Err(Error::Status(output))
        } else {
            Ok(output)
        }
    }
    /// Runs with stdout captured and returns it as a UTF-8 string, with
    /// trailing newlines trimmed (like shell `$(...)` substitution).
    pub fn read(&self) -> Result<String, Error> {
        let output = try!(self.capture_stdout().run());
        let output_str = try!(std::str::from_utf8(&output.stdout));
        Ok(trim_right_newlines(output_str).to_owned())
    }
    /// Connects self's stdout to `right`'s stdin, like a shell `|`.
    pub fn pipe<T: Borrow<Expression>>(&self, right: T) -> Expression {
        Self::new(Pipe(self.clone(), right.borrow().clone()))
    }
    /// Runs self, then `right` only if self succeeded, like a shell `&&`.
    pub fn then<T: Borrow<Expression>>(&self, right: T) -> Expression {
        Self::new(Then(self.clone(), right.borrow().clone()))
    }
    /// Feeds the given bytes to the child's stdin via a writer thread.
    pub fn input<T: AsRef<[u8]>>(&self, input: T) -> Self {
        Self::new(Io(Input(input.as_ref().to_vec()), self.clone()))
    }
    /// Redirects stdin from a path or an already-open File.
    pub fn stdin<T: Into<FileOpener>>(&self, stdin: T) -> Self {
        Self::new(Io(Stdin(stdin.into()), self.clone()))
    }
    /// Redirects stdin from the null device.
    pub fn null_stdin(&self) -> Self {
        Self::new(Io(StdinNull, self.clone()))
    }
    /// Redirects stdout to a path or an already-open File.
    pub fn stdout<T: Into<FileOpener>>(&self, stdout: T) -> Self {
        Self::new(Io(Stdout(stdout.into()), self.clone()))
    }
    /// Redirects stdout to the null device.
    pub fn null_stdout(&self) -> Self {
        Self::new(Io(StdoutNull, self.clone()))
    }
    /// Captures stdout into the Output returned by run().
    pub fn capture_stdout(&self) -> Self {
        Self::new(Io(StdoutCapture, self.clone()))
    }
    /// Redirects stdout to wherever stderr currently points, like `1>&2`.
    pub fn stdout_to_stderr(&self) -> Self {
        Self::new(Io(StdoutToStderr, self.clone()))
    }
    /// Redirects stderr to a path or an already-open File.
    pub fn stderr<T: Into<FileOpener>>(&self, stderr: T) -> Self {
        Self::new(Io(Stderr(stderr.into()), self.clone()))
    }
    /// Redirects stderr to the null device.
    pub fn null_stderr(&self) -> Self {
        Self::new(Io(StderrNull, self.clone()))
    }
    /// Captures stderr into the Output returned by run().
    pub fn capture_stderr(&self) -> Self {
        Self::new(Io(StderrCapture, self.clone()))
    }
    /// Redirects stderr to wherever stdout currently points, like `2>&1`.
    pub fn stderr_to_stdout(&self) -> Self {
        Self::new(Io(StderrToStdout, self.clone()))
    }
    /// Sets the working directory for the children in this subtree.
    pub fn dir<T: AsRef<Path>>(&self, path: T) -> Self {
        Self::new(Io(Dir(path.as_ref().to_owned()), self.clone()))
    }
    /// Sets a single environment variable for the children in this subtree.
    pub fn env<T: AsRef<OsStr>, U: AsRef<OsStr>>(&self, name: T, val: U) -> Self {
        Self::new(Io(Env(name.as_ref().to_owned(), val.as_ref().to_owned()),
                     self.clone()))
    }
    /// Replaces the entire child environment with the given name/value map,
    /// suppressing anything inherited from the parent or set by outer env().
    pub fn full_env<T, U, V>(&self, name_vals: T) -> Self
        where T: IntoIterator<Item = (U, V)>,
              U: AsRef<OsStr>,
              V: AsRef<OsStr>
    {
        let env_map = name_vals.into_iter()
            .map(|(k, v)| (k.as_ref().to_owned(), v.as_ref().to_owned()))
            .collect();
        Self::new(Io(FullEnv(env_map), self.clone()))
    }
    /// Suppresses non-success exit statuses, so run()/read() return Ok.
    pub fn unchecked(&self) -> Self {
        Self::new(Io(Unchecked, self.clone()))
    }
    // Internal constructor: wraps an inner node in the shared Arc.
    fn new(inner: ExpressionInner) -> Self {
        Expression { inner: Arc::new(inner) }
    }
}
// The actual expression tree. Leaves are Cmd/Sh; Pipe/Then combine two
// subtrees; Io wraps a subtree with one redirection or env/dir change.
#[derive(Debug)]
enum ExpressionInner {
    Cmd(Vec<OsString>),
    Sh(OsString),
    Pipe(Expression, Expression),
    Then(Expression, Expression),
    Io(IoExpressionInner, Expression),
}
impl ExpressionInner {
    // Recursively executes this node with the given IO context, blocking
    // until the whole subtree has finished, and returns the subtree's status.
    fn exec(&self, context: IoContext) -> io::Result<ExitStatus> {
        match *self {
            Cmd(ref argv) => exec_argv(argv, context),
            Sh(ref command) => exec_sh(command, context),
            Pipe(ref left, ref right) => exec_pipe(left, right, context),
            Then(ref left, ref right) => exec_then(left, right, context),
            Io(ref io_inner, ref expr) => exec_io(io_inner, expr, context),
        }
    }
}
/// Spawns a single command described by `argv` with the streams, working
/// directory, and environment carried by `context`, and waits for it.
fn exec_argv<T: AsRef<OsStr>>(argv: &[T], context: IoContext) -> io::Result<ExitStatus> {
    let mut command = Command::new(&argv[0]);
    command.args(&argv[1..]);
    // TODO: Avoid unnecessary dup'ing here.
    command.stdin(try!(context.stdin.into_stdio()));
    command.stdout(try!(context.stdout.into_stdio()));
    command.stderr(try!(context.stderr.into_stdio()));
    command.current_dir(context.dir);
    // The child sees exactly the env map carried by the context, nothing else.
    command.env_clear();
    for (name, val) in context.env {
        command.env(name, val);
    }
    // status() already returns io::Result<ExitStatus>; the previous
    // Ok(try!(...)) wrapper was a redundant unwrap-and-rewrap.
    command.status()
}
/// Builds the argv for running `command` under the default POSIX shell.
#[cfg(unix)]
fn shell_command_argv(command: OsString) -> [OsString; 3] {
    let shell = OsStr::new("/bin/sh").to_owned();
    let flag = OsStr::new("-c").to_owned();
    [shell, flag, command]
}
/// Builds the argv for running `command` under cmd.exe (or %COMSPEC%).
#[cfg(windows)]
fn shell_command_argv(command: OsString) -> [OsString; 3] {
    // unwrap_or_else avoids allocating the "cmd.exe" fallback in the common
    // case where COMSPEC is set.
    let comspec = std::env::var_os("COMSPEC")
        .unwrap_or_else(|| OsStr::new("cmd.exe").to_owned());
    [comspec, OsStr::new("/C").to_owned(), command]
}
// Runs `command` through the platform shell by delegating to exec_argv with
// the shell's own argv prepended.
fn exec_sh<T: AsRef<OsStr>>(command: T, context: IoContext) -> io::Result<ExitStatus> {
    exec_argv(&shell_command_argv(command.as_ref().to_owned()), context)
}
// Runs `left | right`. The left side writes into a fresh pipe whose read end
// becomes the right side's stdin. Both sides run concurrently (left on a
// scoped thread) so neither can deadlock on a full pipe buffer. The pipe
// ends are moved into the contexts, so each end closes when its side
// finishes. If both sides fail, the right side's status wins.
fn exec_pipe(left: &Expression, right: &Expression, context: IoContext) -> io::Result<ExitStatus> {
    let pair = try!(os_pipe::pipe());
    let mut left_context = try!(context.try_clone()); // dup'ing stdin/stdout isn't strictly necessary, but no big deal
    left_context.stdout = IoValue::File(pair.write);
    let mut right_context = context;
    right_context.stdin = IoValue::File(pair.read);
    let (left_result, right_result) = crossbeam::scope(|scope| {
        let left_joiner = scope.spawn(|| left.inner.exec(left_context));
        let right_result = right.inner.exec(right_context);
        // Join left only after right has finished, so an early-exiting right
        // side can't leave us blocked holding its pipe end.
        let left_result = left_joiner.join();
        (left_result, right_result)
    });
    let right_status = try!(right_result);
    let left_status = try!(left_result);
    if !right_status.success() {
        Ok(right_status)
    } else {
        Ok(left_status)
    }
}
/// Runs `left && right`: the right side only executes when the left side
/// succeeded; otherwise the left side's failing status is returned.
fn exec_then(left: &Expression, right: &Expression, context: IoContext) -> io::Result<ExitStatus> {
    // The left side gets a duplicated context; the original is kept intact
    // for the right side.
    let left_status = try!(left.inner.exec(try!(context.try_clone())));
    if left_status.success() {
        right.inner.exec(context)
    } else {
        Ok(left_status)
    }
}
// Applies one IO redirection (or env/dir change) to the context, runs the
// wrapped expression, and cleans up any input-writer thread. A crossbeam
// scope is needed because Input writer threads borrow the input bytes.
fn exec_io(io_inner: &IoExpressionInner,
           expr: &Expression,
           context: IoContext)
           -> io::Result<ExitStatus> {
    {
        crossbeam::scope(|scope| {
            let (new_context, maybe_writer_thread) = try!(io_inner.update_context(context, scope));
            let exec_result = expr.inner.exec(new_context);
            // Always join the writer thread before inspecting results, so it
            // can't outlive the scope.
            let writer_result = join_maybe_writer_thread(maybe_writer_thread);
            // Propagate any exec errors first.
            let exec_status = try!(exec_result);
            // Then propagate any writer thread errors.
            try!(writer_result);
            // Finally, implement unchecked() status suppression here.
            if let &Unchecked = io_inner {
                Ok(ExitStatus::from_raw(0))
            } else {
                Ok(exec_status)
            }
        })
    }
}
// One redirection / environment tweak that an Io node applies to its child
// context. Each variant corresponds to one Expression builder method.
#[derive(Debug)]
enum IoExpressionInner {
    Input(Vec<u8>),
    Stdin(FileOpener),
    StdinNull,
    Stdout(FileOpener),
    StdoutNull,
    StdoutCapture,
    StdoutToStderr,
    Stderr(FileOpener),
    StderrNull,
    StderrCapture,
    StderrToStdout,
    Dir(PathBuf),
    Env(OsString, OsString),
    FullEnv(HashMap<OsString, OsString>),
    Unchecked,
}
impl IoExpressionInner {
    // Applies this redirection to `context` and returns the updated context,
    // plus a writer thread handle when the variant is Input (the caller must
    // join it). The 'a lifetime ties the borrowed input bytes to the scope.
    fn update_context<'a>(&'a self,
                          mut context: IoContext,
                          scope: &crossbeam::Scope<'a>)
                          -> io::Result<(IoContext, Option<WriterThread>)> {
        let mut maybe_thread = None;
        match *self {
            Input(ref v) => {
                // Input bytes are fed through a pipe by a scoped thread.
                let (reader, thread) = try!(pipe_with_writer_thread(v, scope));
                context.stdin = IoValue::File(reader);
                maybe_thread = Some(thread)
            }
            Stdin(ref f) => {
                context.stdin = IoValue::File(try!(f.open_for_reading()));
            }
            StdinNull => {
                context.stdin = IoValue::Null;
            }
            Stdout(ref f) => {
                context.stdout = IoValue::File(try!(f.open_for_writing()));
            }
            StdoutNull => {
                context.stdout = IoValue::Null;
            }
            StdoutCapture => {
                // Point stdout at a dup of the capture pipe's write end.
                context.stdout = IoValue::File(try!(context.stdout_capture.try_clone()))
            }
            StdoutToStderr => {
                // `1>&2`: stdout becomes a dup of wherever stderr points now.
                context.stdout = try!(context.stderr.try_clone());
            }
            Stderr(ref f) => {
                context.stderr = IoValue::File(try!(f.open_for_writing()));
            }
            StderrNull => {
                context.stderr = IoValue::Null;
            }
            StderrCapture => {
                context.stderr = IoValue::File(try!(context.stderr_capture.try_clone()))
            }
            StderrToStdout => {
                // `2>&1`: stderr becomes a dup of wherever stdout points now.
                context.stderr = try!(context.stdout.try_clone());
            }
            Dir(ref p) => {
                context.dir = p.clone();
            }
            Env(ref name, ref val) => {
                context.env.insert(name.clone(), val.clone());
            }
            FullEnv(ref map) => {
                context.env = map.clone();
            }
            Unchecked => {
                // No-op. Unchecked is handled in exec_io().
            }
        }
        Ok((context, maybe_thread))
    }
}
// A redirection target accepted by stdin()/stdout()/stderr(): either a path
// to open lazily at exec time, or an already-open File to dup.
#[derive(Debug)]
pub enum FileOpener {
    PathBuf(PathBuf),
    File(File),
}
impl FileOpener {
    // Produces a readable File: opens the path read-only, or dup's the
    // existing handle.
    fn open_for_reading(&self) -> io::Result<File> {
        match *self {
            FileOpener::PathBuf(ref p) => File::open(p),
            FileOpener::File(ref f) => f.try_clone(),
        }
    }
    // Produces a writable File: creates/truncates the path, or dup's the
    // existing handle (no truncation in that case).
    fn open_for_writing(&self) -> io::Result<File> {
        match *self {
            FileOpener::PathBuf(ref p) => File::create(p),
            FileOpener::File(ref f) => f.try_clone(),
        }
    }
}
// Conversions that let stdin()/stdout()/stderr() accept files, paths, and
// string-ish types directly. Each borrowed impl converts through Path and
// allocates an owned PathBuf.
impl From<File> for FileOpener {
    fn from(f: File) -> FileOpener {
        FileOpener::File(f)
    }
}
// TODO: Get rid of most of these impl's once specialization lands.
impl<'a> From<&'a str> for FileOpener {
    fn from(s: &str) -> FileOpener {
        FileOpener::PathBuf(AsRef::<Path>::as_ref(s).to_owned())
    }
}
impl<'a> From<&'a String> for FileOpener {
    fn from(s: &String) -> FileOpener {
        FileOpener::PathBuf(s.clone().into())
    }
}
impl From<String> for FileOpener {
    fn from(s: String) -> FileOpener {
        FileOpener::PathBuf(s.into())
    }
}
impl<'a> From<&'a Path> for FileOpener {
    fn from(p: &Path) -> FileOpener {
        FileOpener::PathBuf(p.to_owned())
    }
}
impl<'a> From<&'a PathBuf> for FileOpener {
    fn from(p: &PathBuf) -> FileOpener {
        FileOpener::PathBuf(p.clone())
    }
}
impl From<PathBuf> for FileOpener {
    fn from(p: PathBuf) -> FileOpener {
        FileOpener::PathBuf(p)
    }
}
impl<'a> From<&'a OsStr> for FileOpener {
    fn from(s: &OsStr) -> FileOpener {
        // The previous `s.clone().into()` only cloned the *reference*
        // (&OsStr), a no-op. Convert through Path like the other borrowed
        // impls for consistency.
        FileOpener::PathBuf(AsRef::<Path>::as_ref(s).to_owned())
    }
}
impl<'a> From<&'a OsString> for FileOpener {
    fn from(s: &OsString) -> FileOpener {
        FileOpener::PathBuf(AsRef::<Path>::as_ref(s).to_owned())
    }
}
impl From<OsString> for FileOpener {
    fn from(s: OsString) -> FileOpener {
        FileOpener::PathBuf(s.into())
    }
}
// Everything run()/read() can fail with: an OS-level IO failure, invalid
// UTF-8 in captured output (read() only), or a non-success exit status
// carrying the full Output.
#[derive(Debug)]
pub enum Error {
    Io(io::Error),
    Utf8(std::str::Utf8Error),
    Status(Output),
}
// These From impls let try! / ? convert the underlying error types
// automatically inside functions returning Result<_, Error>.
impl From<io::Error> for Error {
    fn from(err: io::Error) -> Error {
        Error::Io(err)
    }
}
impl From<std::str::Utf8Error> for Error {
    fn from(err: std::str::Utf8Error) -> Error {
        Error::Utf8(err)
    }
}
// An IoContext represents the file descriptors child processes are talking to at execution time.
// It's initialized in run(), with dups of the stdin/stdout/stderr pipes, and then passed down to
// sub-expressions. Compound expressions will clone() it, and redirections will modify it.
#[derive(Debug)]
pub struct IoContext {
    stdin: IoValue,
    stdout: IoValue,
    stderr: IoValue,
    // Write ends of the capture pipes; capture_stdout()/capture_stderr()
    // dup these into stdout/stderr.
    stdout_capture: File,
    stderr_capture: File,
    // Working directory and full environment the child will receive.
    dir: PathBuf,
    env: HashMap<OsString, OsString>,
}
impl IoContext {
    /// Creates the root context for a top-level run(): streams inherit from
    /// the parent, capture pipes are wired to reader threads, and the dir and
    /// env snapshot the parent process.
    // Returns (context, stdout_reader, stderr_reader).
    fn new() -> io::Result<(IoContext, ReaderThread, ReaderThread)> {
        let (stdout_capture, stdout_reader) = try!(pipe_with_reader_thread());
        let (stderr_capture, stderr_reader) = try!(pipe_with_reader_thread());
        // Snapshot the parent environment. vars_os() yields (name, value)
        // pairs, so it collects straight into the map — no manual loop.
        let env: HashMap<OsString, OsString> = std::env::vars_os().collect();
        let context = IoContext {
            stdin: IoValue::ParentStdin,
            stdout: IoValue::ParentStdout,
            stderr: IoValue::ParentStderr,
            stdout_capture: stdout_capture,
            stderr_capture: stderr_capture,
            dir: try!(std::env::current_dir()),
            env: env,
        };
        Ok((context, stdout_reader, stderr_reader))
    }
    /// Duplicates every handle so a sub-expression gets an independent copy.
    /// The descriptor dups can fail; dir/env clone normally.
    fn try_clone(&self) -> io::Result<IoContext> {
        Ok(IoContext {
            stdin: try!(self.stdin.try_clone()),
            stdout: try!(self.stdout.try_clone()),
            stderr: try!(self.stderr.try_clone()),
            stdout_capture: try!(self.stdout_capture.try_clone()),
            stderr_capture: try!(self.stderr_capture.try_clone()),
            dir: self.dir.clone(),
            env: self.env.clone(),
        })
    }
}
// One of a child process's three standard streams: inherited from the
// parent, redirected to the null device, or backed by a concrete File
// (which includes pipe ends).
#[derive(Debug)]
enum IoValue {
    ParentStdin,
    ParentStdout,
    ParentStderr,
    Null,
    File(File),
}
impl IoValue {
    /// Duplicates this IO value. Only the File variant can actually fail,
    /// since it dup's the underlying descriptor.
    fn try_clone(&self) -> io::Result<IoValue> {
        // Use `match *self` for consistency with the other match blocks in
        // this file (e.g. ExpressionInner::exec) instead of matching on
        // reference patterns.
        Ok(match *self {
            IoValue::ParentStdin => IoValue::ParentStdin,
            IoValue::ParentStdout => IoValue::ParentStdout,
            IoValue::ParentStderr => IoValue::ParentStderr,
            IoValue::Null => IoValue::Null,
            IoValue::File(ref f) => IoValue::File(try!(f.try_clone())),
        })
    }

    /// Converts this value into a Stdio suitable for std::process::Command.
    /// The Parent* variants go through os_pipe, which can fail.
    fn into_stdio(self) -> io::Result<Stdio> {
        match self {
            IoValue::ParentStdin => os_pipe::parent_stdin(),
            IoValue::ParentStdout => os_pipe::parent_stdout(),
            IoValue::ParentStderr => os_pipe::parent_stderr(),
            IoValue::Null => Ok(Stdio::null()),
            IoValue::File(f) => Ok(os_pipe::stdio_from_file(f)),
        }
    }
}
// A thread that drains one capture pipe and yields the collected bytes.
type ReaderThread = JoinHandle<io::Result<Vec<u8>>>;
// Creates a pipe plus a background thread that reads the pipe to EOF.
// Returns the write end (handed to children as a capture target) and the
// reader's join handle; joining yields everything the children wrote.
fn pipe_with_reader_thread() -> io::Result<(File, ReaderThread)> {
    let os_pipe::Pair { mut read, write } = try!(os_pipe::pipe());
    let thread = std::thread::spawn(move || {
        let mut output = Vec::new();
        try!(read.read_to_end(&mut output));
        Ok(output)
    });
    Ok((write, thread))
}
// A scoped thread that feeds input bytes into a pipe.
type WriterThread = crossbeam::ScopedJoinHandle<io::Result<()>>;
/// Creates a pipe plus a scoped thread that writes all of `input` into it.
/// Returns the read end (wired up as a child's stdin) and the writer's join
/// handle. The scope lets the thread borrow `input` without a 'static bound.
fn pipe_with_writer_thread<'a>(input: &'a [u8],
                               scope: &crossbeam::Scope<'a>)
                               -> io::Result<(File, WriterThread)> {
    let os_pipe::Pair { read, mut write } = try!(os_pipe::pipe());
    let thread = scope.spawn(move || {
        // `input` is already a byte slice; pass it directly rather than
        // taking an extra reference (`&input`) to it.
        try!(write.write_all(input));
        Ok(())
    });
    Ok((read, thread))
}
/// Joins the optional input-writer thread, swallowing broken-pipe errors.
/// A broken pipe just means the child exited before reading all its input,
/// which is expected and not an error from the caller's point of view.
fn join_maybe_writer_thread(maybe_writer_thread: Option<WriterThread>) -> io::Result<()> {
    let thread = match maybe_writer_thread {
        Some(thread) => thread,
        None => return Ok(()),
    };
    match thread.join() {
        Ok(()) => Ok(()),
        Err(err) => {
            if err.kind() == io::ErrorKind::BrokenPipe {
                Ok(())
            } else {
                Err(err)
            }
        }
    }
}
/// Strips any run of trailing '\r' / '\n' characters from `s`, leaving all
/// other trailing whitespace intact.
fn trim_right_newlines(s: &str) -> &str {
    s.trim_right_matches(|c| c == '\r' || c == '\n')
}
#[cfg(test)]
mod test {
extern crate tempdir;
use self::tempdir::TempDir;
use super::*;
use std::collections::HashMap;
use std::env;
use std::ffi::{OsStr, OsString};
use std::fs::File;
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::str;
#[cfg(unix)]
fn exe_filename(name: &str) -> String {
name.to_owned()
}
#[cfg(windows)]
fn exe_filename(name: &str) -> String {
format!("{}.exe", name)
}
fn path_to_test_binary(name: &str) -> PathBuf {
let test_project = Path::new(".").join("test").join(name);
// Build the test command.
sh("cargo build --quiet")
.dir(&test_project)
.run()
.expect(&format!("building test command '{}' returned an error", name));
// Return the path to the built binary.
test_project.join("target")
.join("debug")
.join(exe_filename(name))
.canonicalize()
.unwrap()
}
fn true_cmd() -> Expression {
cmd!(path_to_test_binary("status"), "0")
}
fn false_cmd() -> Expression {
cmd!(path_to_test_binary("status"), "1")
}
#[test]
fn test_cmd() {
// Windows compatible.
let output = cmd!(path_to_test_binary("echo"), "hi").read().unwrap();
assert_eq!("hi", output);
}
#[test]
fn test_sh() {
// Windows compatible.
let output = sh("echo hi").read().unwrap();
assert_eq!("hi", output);
}
#[test]
fn test_error() {
let result = false_cmd().run();
if let Err(Error::Status(output)) = result {
// Check that the status is non-zero.
assert!(!output.status.success());
} else {
panic!("Expected a status error.");
}
}
#[test]
fn test_unchecked() {
let unchecked_false = false_cmd().unchecked();
let output = unchecked_false.then(cmd!(path_to_test_binary("echo"), "waa"))
.then(unchecked_false)
.read()
.unwrap();
assert_eq!("waa", output);
}
#[test]
fn test_pipe() {
let output = sh("echo xxx").pipe(cmd!(path_to_test_binary("x_to_y"))).read().unwrap();
assert_eq!("yyy", output);
// Check that errors on either side are propagated.
let result = true_cmd().pipe(false_cmd()).run();
match result {
Err(Error::Status(output)) => {
assert!(output.status.code().unwrap() == 1);
}
_ => panic!("should never get here"),
}
let result = false_cmd().pipe(true_cmd()).run();
match result {
Err(Error::Status(output)) => {
assert!(output.status.code().unwrap() == 1);
}
_ => panic!("should never get here"),
}
}
#[test]
fn test_then() {
let output = true_cmd().then(sh("echo lo")).read().unwrap();
assert_eq!("lo", output);
// Check that errors on either side are propagated.
let result = true_cmd().then(false_cmd()).run();
match result {
Err(Error::Status(output)) => {
assert!(output.status.code().unwrap() == 1);
}
_ => panic!("should never get here"),
}
let result = false_cmd().then(true_cmd()).run();
match result {
Err(Error::Status(output)) => {
assert!(output.status.code().unwrap() == 1);
}
_ => panic!("should never get here"),
}
}
#[test]
fn test_input() {
// TODO: Fixed-length bytes input like b"foo" works poorly here. Why?
let expr = cmd!(path_to_test_binary("x_to_y")).input("xxx");
let output = expr.read().unwrap();
assert_eq!("yyy", output);
}
#[test]
fn test_null() {
let expr = cmd!(path_to_test_binary("cat"))
.null_stdin()
.null_stdout()
.null_stderr();
let output = expr.read().unwrap();
assert_eq!("", output);
}
#[test]
fn test_path() {
let dir = TempDir::new("test_path").unwrap();
let input_file = dir.path().join("input_file");
let output_file = dir.path().join("output_file");
File::create(&input_file).unwrap().write_all(b"xxx").unwrap();
let expr = cmd!(path_to_test_binary("x_to_y"))
.stdin(&input_file)
.stdout(&output_file);
let output = expr.read().unwrap();
assert_eq!("", output);
let mut file_output = String::new();
File::open(&output_file).unwrap().read_to_string(&mut file_output).unwrap();
assert_eq!("yyy", file_output);
}
#[test]
fn test_swapping() {
let output = sh("echo hi")
.stdout_to_stderr()
.capture_stderr()
.run()
.unwrap();
let stderr = str::from_utf8(&output.stderr).unwrap().trim();
assert_eq!("hi", stderr);
// Windows compatible. (Requires no space before the ">".)
let output = sh("echo hi>&2").stderr_to_stdout().read().unwrap();
assert_eq!("hi", output);
}
#[test]
fn test_file() {
let dir = TempDir::new("test_file").unwrap();
let file = dir.path().join("file");
File::create(&file).unwrap().write_all(b"example").unwrap();
let expr = cmd!(path_to_test_binary("cat")).stdin(File::open(&file).unwrap());
let output = expr.read().unwrap();
assert_eq!(output, "example");
}
#[test]
fn test_ergonomics() {
let mystr = "owned string".to_owned();
let mypathbuf = Path::new("a/b/c").to_owned();
let myvec = vec![1, 2, 3];
// These are nonsense expressions. We just want to make sure they compile.
let _ = sh("true").stdin(&*mystr).input(&*myvec).stdout(&*mypathbuf);
let _ = sh("true").stdin(&mystr).input(&myvec).stdout(&mypathbuf);
let _ = sh("true").stdin(mystr).input(myvec).stdout(mypathbuf);
}
#[test]
fn test_capture_both() {
// Windows compatible, no space before ">", and we trim newlines at the end to avoid
// dealing with the different kinds.
let output = sh("echo hi")
.then(sh("echo lo>&2"))
.capture_stdout()
.capture_stderr()
.run()
.unwrap();
assert_eq!("hi", str::from_utf8(&output.stdout).unwrap().trim());
assert_eq!("lo", str::from_utf8(&output.stderr).unwrap().trim());
}
#[test]
fn test_dir() {
let pwd = cmd!(path_to_test_binary("pwd"));
// First assert that ordinary commands happen in the parent's dir.
let pwd_output = pwd.read().unwrap();
let pwd_path = Path::new(&pwd_output);
assert_eq!(pwd_path, env::current_dir().unwrap());
// Now create a temp dir and make sure we can set dir to it.
let dir = TempDir::new("duct_test").unwrap();
let pwd_output = pwd.dir(dir.path()).read().unwrap();
let pwd_path = Path::new(&pwd_output);
assert_eq!(pwd_path, dir.path());
}
#[test]
fn test_env() {
let output = cmd!(path_to_test_binary("print_env"), "foo")
.env("foo", "bar")
.read()
.unwrap();
assert_eq!("bar", output);
}
#[test]
fn test_full_env() {
let var_name = "test_env_remove_var";
// Capture the parent env, and make sure it does *not* contain our variable.
let mut clean_env: HashMap<OsString, OsString> = env::vars_os().collect();
clean_env.remove(AsRef::<OsStr>::as_ref(var_name));
// Run a child process with that map passed to full_env(). It should be guaranteed not to
// see our variable, regardless of any outer env() calls or changes in the parent.
let clean_child = cmd!(path_to_test_binary("print_env"), var_name).full_env(clean_env);
// Dirty the parent env. Should be suppressed.
env::set_var(var_name, "junk1");
// And make an outer env() call. Should also be suppressed.
let dirty_child = clean_child.env(var_name, "junk2");
// Check that neither of those have any effect.
let output = dirty_child.read().unwrap();
assert_eq!("", output);
}
#[test]
fn test_broken_pipe() {
// If the input writing thread fills up its pipe buffer, writing will block. If the process
// on the other end of the pipe exits while writer is waiting, the write will return an
// error. We need to swallow that error, rather than returning it.
let myvec = vec![0; 1_000_000];
true_cmd().input(myvec).run().unwrap();
}
}
// ---------------------------------------------------------------------------
extern crate libc;
pub mod instruction;
pub mod constants;
mod ffi;
pub mod capstone;
pub use instruction::*;
pub use constants::*;
pub use capstone::Capstone;
/// An opaque handle to a capstone engine.
///
/// bindgen uses this type name everywhere by default, so it is easier to keep
/// the (admittedly confusing) C name than to rename it.
///
/// It should not be exported; Rust's visibility rules make tackling this
/// not immediately obvious.
#[allow(non_camel_case_types)]
type csh = libc::c_ulong;
#[cfg(test)]
mod test {
    use super::*;
    // push rbp; mov rax, [rip + 0x13b8] — two x86-64 instructions.
    static CODE: &'static [u8] = b"\x55\x48\x8b\x05\xb8\x13\x00\x00";
    #[test]
    fn test_x86_simple() {
        match capstone::Capstone::new(constants::CsArch::ARCH_X86, constants::CsMode::MODE_64) {
            Some(cs) => {
                if let Some(insns) = cs.disasm(CODE, 0x1000, 0) {
                    assert_eq!(insns.len(), 2);
                    let is: Vec<_> = insns.iter().collect();
                    assert_eq!(is[0].mnemonic().unwrap(), "push");
                    assert_eq!(is[1].mnemonic().unwrap(), "mov");
                    assert_eq!(is[0].address, 0x1000);
                    assert_eq!(is[1].address, 0x1001);
                } else {
                    // panic! states test failure directly; assert!(false, ..)
                    // was a constant-condition assertion.
                    panic!("Couldn't disasm instructions")
                }
                let reg_id = 1;
                match cs.reg_name(reg_id) {
                    Some(reg_name) => assert_eq!(reg_name, "ah"),
                    None => panic!("Couldn't get register name"),
                }
                let insn_id = 1;
                match cs.insn_name(insn_id) {
                    Some(insn_name) => assert_eq!(insn_name, "aaa"),
                    None => panic!("Couldn't get instruction name"),
                }
            }
            None => {
                panic!("Couldn't create a cs engine");
            }
        }
    }
}
// commit: Update tests for new format
extern crate libc;
pub mod instruction;
pub mod constants;
mod ffi;
pub mod capstone;
pub use instruction::*;
pub use constants::*;
pub use capstone::Capstone;
/// An opaque handle to a capstone engine.
///
/// bindgen uses this type name everywhere by default, so it is easier to keep
/// the (admittedly confusing) C name than to rename it.
///
/// It should not be exported; Rust's visibility rules make tackling this
/// not immediately obvious.
#[allow(non_camel_case_types)]
type csh = libc::c_ulong;
#[cfg(test)]
mod test {
    use super::*;
    // push rbp; mov rax, [rip + 0x13b8] — two x86-64 instructions.
    static CODE: &'static [u8] = b"\x55\x48\x8b\x05\xb8\x13\x00\x00";
    #[test]
    fn test_x86_simple() {
        match capstone::Capstone::new(constants::CsArch::ARCH_X86, constants::CsMode::MODE_64) {
            Ok(cs) => {
                match cs.disasm(CODE, 0x1000, 0) {
                    Ok(insns) => {
                        assert_eq!(insns.len(), 2);
                        let is: Vec<_> = insns.iter().collect();
                        assert_eq!(is[0].mnemonic().unwrap(), "push");
                        assert_eq!(is[1].mnemonic().unwrap(), "mov");
                        assert_eq!(is[0].address, 0x1000);
                        assert_eq!(is[1].address, 0x1001);
                    },
                    Err(err) => {
                        // panic! with a format string replaces the old
                        // assert!(false, format!(..)); formatting the error
                        // directly also drops the redundant .to_string().
                        panic!("Couldn't disasm instructions: {}", err)
                    }
                }
                let reg_id = 1;
                match cs.reg_name(reg_id) {
                    Ok(reg_name) => assert_eq!(reg_name, "ah"),
                    Err(err) => panic!("Couldn't get register name: {}", err),
                }
                let insn_id = 1;
                match cs.insn_name(insn_id) {
                    Ok(insn_name) => assert_eq!(insn_name, "aaa"),
                    Err(err) => panic!("Couldn't get instruction name: {}", err),
                }
            }
            Err(e) => {
                panic!("Couldn't create a cs engine: {}", e);
            }
        }
    }
}
// ---------------------------------------------------------------------------
// (c) 2015 Joost Yervante Damad <joost@damad.be>
// loosely based on https://github.com/cgaebel/sexp
// latest version can be found at https://github.com/andete/rust_sexp
use std::fmt;
use std::str::FromStr;
use std::f64;
use std::fs::File;
use std::io::prelude::*;
/// A single s-expression atom: a bare symbol, a quoted string, an integer,
/// or a float.
pub enum Atom {
    S(String),
    Q(String),
    I(i64),
    F(f64),
}
impl Atom {
    /// Returns the value as a float. Integers are widened to f64; the string
    /// variants are an error (error text uses this type's Display impl).
    pub fn f(&self) -> Result<f64,String> {
        match *self {
            Atom::F(f) => Ok(f),
            Atom::I(i) => Ok(i as f64),
            ref x => Err(format!("not a float: {}", x))
        }
    }
    /// Returns the value as an integer; any other variant (including F) is
    /// an error — no float truncation is attempted.
    pub fn i(&self) -> Result<i64,String> {
        match *self {
            Atom::I(i) => Ok(i),
            ref x => Err(format!("not an int: {}", x))
        }
    }
    /// Returns a copy of the string payload for the S/Q variants only;
    /// numeric variants are an error.
    pub fn string(&self) -> Result<String,String> {
        match *self {
            Atom::S(ref s) => Ok(s.clone()),
            Atom::Q(ref s) => Ok(s.clone()),
            ref x => Err(format!("not a string: {}", x))
        }
    }
    /// Renders any variant as a String (numbers are formatted); unlike
    /// string(), this never fails.
    pub fn as_string(&self) -> Result<String,String> {
        match *self {
            Atom::S(ref s) => Ok(s.clone()),
            Atom::Q(ref s) => Ok(s.clone()),
            Atom::F(ref s) => Ok(format!("{}", s)),
            Atom::I(ref s) => Ok(format!("{}", s)),
        }
    }
}
/// A parsed s-expression node: an atom, the empty expression, or a list of
/// child expressions.
pub enum Sexp {
    Atom(Atom),
    Empty,
    List(Vec<Sexp>),
}
impl Sexp {
    /// Borrow the contained atom, or report what this node is instead.
    pub fn atom(&self) -> Result<&Atom,String> {
        if let Sexp::Atom(ref a) = *self {
            Ok(a)
        } else {
            Err(format!("not an atom: {}", self))
        }
    }
    /// Borrow the contained list, or report what this node is instead.
    pub fn list(&self) -> Result<&Vec<Sexp>,String> {
        if let Sexp::List(ref v) = *self {
            Ok(v)
        } else {
            Err(format!("not a list: {}", self))
        }
    }
}
/// Position-annotated parse failure; `line` is 1-based (see `parse`),
/// `lpos` is the 0-based position within that line.
pub struct ParseError {
    msg: &'static str,
    line: usize,
    lpos: usize,
}
/// All errors this module can produce.
pub enum Error {
    ParseError(ParseError),
    IOError(std::io::Error),
}
/// Result alias used throughout the parser.
pub type ERes<T> = Result<T, Error>;
// build a ParseError carrying the parser's current position
fn err_parse<T>(msg: &'static str, state: &ParseState) -> ERes<T> {
    Err(Error::ParseError(ParseError { msg: msg, line: state.line, lpos: state.lpos}))
}
// wrap an I/O error into the module error type
fn err_io<T>(err: std::io::Error) -> ERes<T> {
    Err(Error::IOError(err))
}
/// Cursor over the characters of the document being parsed.
struct ParseState {
    pos: usize,      // index of the next character in `vec`
    line: usize,     // current line, 1-based
    lpos: usize,     // position within the current line, reset on newline
    vec: Vec<char>,
    len: usize,      // cached vec.len()
}
impl ParseState {
    /// Current character without consuming it; end of input is an error.
    fn peek(&self) -> ERes<char> {
        match self.peek_option() {
            Some(c) => Ok(c),
            None => err_parse("end of document reached", self),
        }
    }
    /// Like `peek`, but yields `None` at end of input instead of an error.
    fn peek_option(&self) -> Option<char> {
        if self.pos < self.len {
            Some(self.vec[self.pos])
        } else {
            None
        }
    }
    /// Consume one character, keeping line/column bookkeeping current.
    fn next(&mut self) -> ERes<()> {
        if self.pos >= self.len {
            return err_parse("end of document reached", self);
        }
        let c = self.vec[self.pos];
        self.pos += 1;
        if c == '\r' || c == '\n' {
            self.line += 1;
            self.lpos = 0;
        } else {
            self.lpos += 1;
        }
        Ok(())
    }
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match *self {
Error::ParseError(ref p) =>
write!(f, "Parse Error {}:{}: {}", p.line, p.lpos, p.msg),
Error::IOError(ref i) =>
write!(f, "{}", i)
}
}
}
impl fmt::Debug for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match *self {
Error::ParseError(ref p) =>
write!(f, "Parse Error {}:{}: {}", p.line, p.lpos, p.msg),
Error::IOError(ref i) =>
write!(f, "{}", i)
}
}
}
impl fmt::Display for Atom {
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        match *self {
            Atom::S(ref s) => write!(f, "{}", s),
            // quoted atoms get their quotes back (no escaping is performed)
            Atom::Q(ref s) => write!(f, "\"{}\"", s),
            Atom::I(i) => write!(f, "{}", i),
            Atom::F(d) => {
                // Whole-valued floats are printed as "N.0" so that a
                // print/parse round trip keeps them floats, not ints.
                let z = d.floor();
                if d - z < f64::EPSILON {
                    write!(f, "{}.0", z)
                } else {
                    write!(f, "{}", d)
                }
            }
        }
    }
}
impl fmt::Display for Sexp {
    /// Renders the expression back to source form; `Empty` prints nothing,
    /// list elements are space-separated inside parentheses.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        match *self {
            Sexp::Empty => write!(f, ""),
            Sexp::Atom(ref a) => write!(f, "{}", a),
            Sexp::List(ref items) => {
                try!(write!(f, "("));
                let mut first = true;
                for item in items {
                    if first {
                        first = false;
                    } else {
                        try!(write!(f, " "));
                    }
                    try!(write!(f, "{}", item));
                }
                write!(f, ")")
            }
        }
    }
}
/// Parse a parenthesized list; `state` must be positioned on the '('.
fn parse_list(state: &mut ParseState) -> ERes<Sexp> {
    try!(state.next()); // consume '('
    let mut items: Vec<Sexp> = Vec::new();
    // collect elements until the matching ')'
    while try!(state.peek()) != ')' {
        items.push(try!(parse_sexp(state)));
    }
    try!(state.next()); // consume ')'
    Ok(Sexp::List(items))
}
/// Parse a double-quoted string; `state` must be positioned on the opening '"'.
/// No escape sequences are supported: every character up to the next '"'
/// (including newlines) is taken verbatim.
fn parse_quoted_string(state: &mut ParseState) -> ERes<Atom> {
    try!(state.next()); // skip the opening "
    let mut s = String::new();
    loop {
        match try!(state.peek()) {
            '"' => {
                try!(state.next()); // skip the closing "
                break
            }
            // The original had a separate `'\r' | '\n'` arm here that was
            // byte-for-byte identical to this catch-all arm; merged.
            x => {
                s.push(x);
                try!(state.next());
            }
        }
    }
    Ok(Atom::Q(s))
}
/// Parse a bare (unquoted) string atom; it ends at whitespace, ')' or EOF.
/// A '"' inside a bare string is a parse error.
fn parse_string(state: &mut ParseState) -> ERes<Atom> {
    let mut s = String::new();
    while let Some(c) = state.peek_option() {
        match c {
            ' ' | '\t' | '\r' | '\n' | ')' => break,
            '"' => return err_parse("unexpected \" in string", state),
            _ => s.push(c),
        }
        try!(state.next());
    }
    Ok(Atom::S(s))
}
/// Parse an integer or float atom; it ends at whitespace, ')' or EOF.
/// A '.' anywhere in the token makes it a float.
fn parse_number(state: &mut ParseState) -> ERes<Atom> {
    let mut s = String::new();
    loop {
        match state.peek_option() {
            Some(x) => {
                match x {
                    ' ' | '\r' | '\n' | '\t' | ')' => {
                        break
                    },
                    '0' ... '9' | '.' | '-' => {
                        // was: s.push(state.vec[state.pos]) — same character,
                        // just read through the match binding
                        s.push(x)
                    },
                    _ => {
                        return err_parse("unexpected char in number", state)
                    },
                }
            }
            None => {
                break
            }
        }
        try!(state.next())
    }
    let s2: &str = &s[..];
    // Previously these conversions used unwrap(), so malformed tokens that
    // pass the char filter above (e.g. "-", "--", "1.2.3") panicked instead
    // of reporting a parse error.
    if s.contains('.') {
        match f64::from_str(s2) {
            Ok(f) => Ok(Atom::F(f)),
            Err(_) => err_parse("invalid float", state),
        }
    } else {
        match i64::from_str(s2) {
            Ok(i) => Ok(Atom::I(i)),
            Err(_) => err_parse("invalid int", state),
        }
    }
}
/// Dispatch on the first character: '"' starts a quoted string; a digit,
/// '.' or '-' starts a number; anything else starts a bare string.
fn parse_atom(state: &mut ParseState) -> ERes<Sexp> {
    let a = match try!(state.peek()) {
        '"' => try!(parse_quoted_string(state)),
        '0' ... '9' | '.' | '-' => try!(parse_number(state)),
        _ => try!(parse_string(state)),
    };
    Ok(Sexp::Atom(a))
}
/// Parse the next expression, skipping leading whitespace.
/// A bare ')' at this level is an "unmatched )" error.
fn parse_sexp(state: &mut ParseState) -> ERes<Sexp> {
    loop {
        let c = try!(state.peek());
        if c == '(' {
            return parse_list(state);
        }
        if c == ')' {
            return err_parse("unmatched )", state);
        }
        if c == ' ' || c == '\t' || c == '\r' || c == '\n' {
            try!(state.next());
        } else {
            return parse_atom(state);
        }
    }
}
/// Parse a complete document. The empty document parses to `Sexp::Empty`;
/// anything else must start with a single expression.
fn parse(data: &str) -> ERes<Sexp> {
    // idiomatic emptiness check (was `data.len() == 0`)
    if data.is_empty() {
        Ok(Sexp::Empty)
    } else {
        let vec: Vec<char> = data.chars().collect();
        let len = vec.len();
        // line is 1-based, column (lpos) is 0-based
        let state = &mut ParseState { pos: 0, line: 1, lpos: 0, vec: vec, len: len };
        parse_sexp(state)
    }
}
/// Read the entire contents of the file `name` into a String.
fn read_file(name: &str) -> Result<String, std::io::Error> {
    let mut contents = String::new();
    let mut file = try!(File::open(name));
    try!(file.read_to_string(&mut contents));
    Ok(contents)
}
pub fn parse_str(s: &str) -> ERes<Sexp> {
parse(s)
}
pub fn parse_file(name: &str) -> ERes<Sexp> {
let s = try!(match read_file(name) {
Ok(s) => Ok(s),
Err(x) => err_io(x),
});
parse(&s[..])
}
#[cfg(test)]
mod tests {
    use super::*;
    // Round-trip check: parsing `s` and printing the result must
    // reproduce `s` byte for byte.
    #[allow(dead_code)]
    fn check_parse(s: &str) {
        let e = parse_str(s).unwrap();
        let t = format!("{}", e);
        assert_eq!(s, t);
    }
    // For should_panic tests: unwrap() panics with the error's Debug text.
    #[allow(dead_code)]
    fn parse_fail(s: &str) {
        parse_str(s).unwrap();
    }
    #[test]
    fn test_empty() { check_parse("") }
    #[test]
    fn test_minimal() { check_parse("()") }
    #[test]
    fn test_string() { check_parse("hello") }
    #[test]
    fn test_qstring() { check_parse("\"hello\"") }
    #[test]
    fn test_number() { check_parse("1.3") }
    // whole-valued floats must keep their ".0" when printed back
    #[test]
    fn test_float_vs_int() { check_parse("2.0") }
    #[test]
    fn test_double() { check_parse("(())") }
    #[test]
    fn test_br_string() { check_parse("(world)") }
    #[test]
    fn test_br_qstring() { check_parse("(\"world\")") }
    #[test]
    fn test_br_int() { check_parse("(42)") }
    #[test]
    fn test_br_float() { check_parse("(12.7)") }
    // parentheses inside a quoted string are not structure
    #[test]
    fn test_br_qbrstring() { check_parse("(\"(()\")") }
    #[test]
    #[should_panic(expected="Parse Error 1:1: end of document reached")]
    fn test_invalid1() { parse_fail("(") }
    #[test]
    #[should_panic(expected="Parse Error 1:0: unmatched )")]
    fn test_invalid2() { parse_fail(")") }
    #[test]
    #[should_panic(expected="Parse Error 1:6: end of document reached")]
    fn test_invalid3() { parse_fail("\"hello") }
    // large mixed expression exercising lists, bare strings, quoted
    // strings, ints and floats together
    #[test]
    fn test_complex() { check_parse("(module SWITCH_3W_SIDE_MMP221-R (layer F.Cu) (descr \"\") (pad 1 thru_hole rect (size 1.2 1.2) (at -2.5 -1.6 0) (layers *.Cu *.Mask) (drill 0.8)) (pad 2 thru_hole rect (size 1.2 1.2) (at 0.0 -1.6 0) (layers *.Cu *.Mask) (drill 0.8)) (pad 3 thru_hole rect (size 1.2 1.2) (at 2.5 -1.6 0) (layers *.Cu *.Mask) (drill 0.8)) (pad 5 thru_hole rect (size 1.2 1.2) (at 0.0 1.6 0) (layers *.Cu *.Mask) (drill 0.8)) (pad 6 thru_hole rect (size 1.2 1.2) (at -2.5 1.6 0) (layers *.Cu *.Mask) (drill 0.8)) (pad 4 thru_hole rect (size 1.2 1.2) (at 2.5 1.6 0) (layers *.Cu *.Mask) (drill 0.8)) (fp_line (start -4.5 -1.75) (end 4.5 -1.75) (layer F.SilkS) (width 0.127)) (fp_line (start 4.5 -1.75) (end 4.5 1.75) (layer F.SilkS) (width 0.127)) (fp_line (start 4.5 1.75) (end -4.5 1.75) (layer F.SilkS) (width 0.127)) (fp_line (start -4.5 1.75) (end -4.5 -1.75) (layer F.SilkS) (width 0.127)))") }
}
fix email
// (c) 2015 Joost Yervante Damad <joost@productize.be>
// loosely based on https://github.com/cgaebel/sexp
// latest version can be found at https://github.com/andete/rust_sexp
use std::fmt;
use std::str::FromStr;
use std::f64;
use std::fs::File;
use std::io::prelude::*;
/// A single s-expression atom: bare string, quoted string, integer or float.
pub enum Atom {
    S(String),   // bare (unquoted) string
    Q(String),   // quoted string; printed wrapped in '"'
    I(i64),      // integer
    F(f64),      // floating point number
}
impl Atom {
    /// Numeric value as `f64`; integers are widened, strings are an error.
    pub fn f(&self) -> Result<f64,String> {
        match *self {
            Atom::I(i) => Ok(i as f64),
            Atom::F(f) => Ok(f),
            ref other => Err(format!("not a float: {}", other)),
        }
    }
    /// Integer value; any other atom kind is an error.
    pub fn i(&self) -> Result<i64,String> {
        if let Atom::I(i) = *self {
            Ok(i)
        } else {
            Err(format!("not an int: {}", self))
        }
    }
    /// Owned copy of a bare or quoted string; numbers are an error.
    pub fn string(&self) -> Result<String,String> {
        match *self {
            Atom::S(ref s) | Atom::Q(ref s) => Ok(s.clone()),
            ref other => Err(format!("not a string: {}", other)),
        }
    }
    /// Any atom rendered as an owned string; never fails.
    /// Note: floats go through plain `Display` here (so 2.0 becomes "2"),
    /// unlike `Display for Atom` which keeps a trailing ".0".
    pub fn as_string(&self) -> Result<String,String> {
        match *self {
            Atom::S(ref s) | Atom::Q(ref s) => Ok(s.clone()),
            Atom::F(f) => Ok(format!("{}", f)),
            Atom::I(i) => Ok(format!("{}", i)),
        }
    }
}
/// An s-expression: a single atom, an empty document, or a list of
/// nested s-expressions.
pub enum Sexp {
    Atom(Atom),
    Empty,           // produced only when parsing an empty input
    List(Vec<Sexp>),
}
impl Sexp {
    /// Borrow the contained atom, or report what this node is instead.
    pub fn atom(&self) -> Result<&Atom,String> {
        if let Sexp::Atom(ref a) = *self {
            Ok(a)
        } else {
            Err(format!("not an atom: {}", self))
        }
    }
    /// Borrow the contained list, or report what this node is instead.
    pub fn list(&self) -> Result<&Vec<Sexp>,String> {
        if let Sexp::List(ref v) = *self {
            Ok(v)
        } else {
            Err(format!("not a list: {}", self))
        }
    }
}
/// Position-annotated parse failure; `line` is 1-based (see `parse`),
/// `lpos` is the 0-based position within that line.
pub struct ParseError {
    msg: &'static str,
    line: usize,
    lpos: usize,
}
/// All errors this module can produce.
pub enum Error {
    ParseError(ParseError),
    IOError(std::io::Error),
}
/// Result alias used throughout the parser.
pub type ERes<T> = Result<T, Error>;
// build a ParseError carrying the parser's current position
fn err_parse<T>(msg: &'static str, state: &ParseState) -> ERes<T> {
    Err(Error::ParseError(ParseError { msg: msg, line: state.line, lpos: state.lpos}))
}
// wrap an I/O error into the module error type
fn err_io<T>(err: std::io::Error) -> ERes<T> {
    Err(Error::IOError(err))
}
/// Cursor over the characters of the document being parsed.
struct ParseState {
    pos: usize,      // index of the next character in `vec`
    line: usize,     // current line, 1-based
    lpos: usize,     // position within the current line, reset on newline
    vec: Vec<char>,
    len: usize,      // cached vec.len()
}
impl ParseState {
    /// Current character without consuming it; end of input is an error.
    fn peek(&self) -> ERes<char> {
        match self.peek_option() {
            Some(c) => Ok(c),
            None => err_parse("end of document reached", self),
        }
    }
    /// Like `peek`, but yields `None` at end of input instead of an error.
    fn peek_option(&self) -> Option<char> {
        if self.pos < self.len {
            Some(self.vec[self.pos])
        } else {
            None
        }
    }
    /// Consume one character, keeping line/column bookkeeping current.
    fn next(&mut self) -> ERes<()> {
        if self.pos >= self.len {
            return err_parse("end of document reached", self);
        }
        let c = self.vec[self.pos];
        self.pos += 1;
        if c == '\r' || c == '\n' {
            self.line += 1;
            self.lpos = 0;
        } else {
            self.lpos += 1;
        }
        Ok(())
    }
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match *self {
Error::ParseError(ref p) =>
write!(f, "Parse Error {}:{}: {}", p.line, p.lpos, p.msg),
Error::IOError(ref i) =>
write!(f, "{}", i)
}
}
}
impl fmt::Debug for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match *self {
Error::ParseError(ref p) =>
write!(f, "Parse Error {}:{}: {}", p.line, p.lpos, p.msg),
Error::IOError(ref i) =>
write!(f, "{}", i)
}
}
}
impl fmt::Display for Atom {
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        match *self {
            Atom::S(ref s) => write!(f, "{}", s),
            // quoted atoms get their quotes back (no escaping is performed)
            Atom::Q(ref s) => write!(f, "\"{}\"", s),
            Atom::I(i) => write!(f, "{}", i),
            Atom::F(d) => {
                // Whole-valued floats are printed as "N.0" so that a
                // print/parse round trip keeps them floats, not ints.
                let z = d.floor();
                if d - z < f64::EPSILON {
                    write!(f, "{}.0", z)
                } else {
                    write!(f, "{}", d)
                }
            }
        }
    }
}
impl fmt::Display for Sexp {
    /// Renders the expression back to source form; `Empty` prints nothing,
    /// list elements are space-separated inside parentheses.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        match *self {
            Sexp::Empty => write!(f, ""),
            Sexp::Atom(ref a) => write!(f, "{}", a),
            Sexp::List(ref items) => {
                try!(write!(f, "("));
                let mut first = true;
                for item in items {
                    if first {
                        first = false;
                    } else {
                        try!(write!(f, " "));
                    }
                    try!(write!(f, "{}", item));
                }
                write!(f, ")")
            }
        }
    }
}
/// Parse a parenthesized list; `state` must be positioned on the '('.
fn parse_list(state: &mut ParseState) -> ERes<Sexp> {
    try!(state.next()); // consume '('
    let mut items: Vec<Sexp> = Vec::new();
    // collect elements until the matching ')'
    while try!(state.peek()) != ')' {
        items.push(try!(parse_sexp(state)));
    }
    try!(state.next()); // consume ')'
    Ok(Sexp::List(items))
}
/// Parse a double-quoted string; `state` must be positioned on the opening '"'.
/// No escape sequences are supported: every character up to the next '"'
/// (including newlines) is taken verbatim.
fn parse_quoted_string(state: &mut ParseState) -> ERes<Atom> {
    try!(state.next()); // skip the opening "
    let mut s = String::new();
    loop {
        match try!(state.peek()) {
            '"' => {
                try!(state.next()); // skip the closing "
                break
            }
            // The original had a separate `'\r' | '\n'` arm here that was
            // byte-for-byte identical to this catch-all arm; merged.
            x => {
                s.push(x);
                try!(state.next());
            }
        }
    }
    Ok(Atom::Q(s))
}
/// Parse a bare (unquoted) string atom; it ends at whitespace, ')' or EOF.
/// A '"' inside a bare string is a parse error.
fn parse_string(state: &mut ParseState) -> ERes<Atom> {
    let mut s = String::new();
    while let Some(c) = state.peek_option() {
        match c {
            ' ' | '\t' | '\r' | '\n' | ')' => break,
            '"' => return err_parse("unexpected \" in string", state),
            _ => s.push(c),
        }
        try!(state.next());
    }
    Ok(Atom::S(s))
}
/// Parse an integer or float atom; it ends at whitespace, ')' or EOF.
/// A '.' anywhere in the token makes it a float.
fn parse_number(state: &mut ParseState) -> ERes<Atom> {
    let mut s = String::new();
    loop {
        match state.peek_option() {
            Some(x) => {
                match x {
                    ' ' | '\r' | '\n' | '\t' | ')' => {
                        break
                    },
                    '0' ... '9' | '.' | '-' => {
                        // was: s.push(state.vec[state.pos]) — same character,
                        // just read through the match binding
                        s.push(x)
                    },
                    _ => {
                        return err_parse("unexpected char in number", state)
                    },
                }
            }
            None => {
                break
            }
        }
        try!(state.next())
    }
    let s2: &str = &s[..];
    // Previously these conversions used unwrap(), so malformed tokens that
    // pass the char filter above (e.g. "-", "--", "1.2.3") panicked instead
    // of reporting a parse error.
    if s.contains('.') {
        match f64::from_str(s2) {
            Ok(f) => Ok(Atom::F(f)),
            Err(_) => err_parse("invalid float", state),
        }
    } else {
        match i64::from_str(s2) {
            Ok(i) => Ok(Atom::I(i)),
            Err(_) => err_parse("invalid int", state),
        }
    }
}
/// Dispatch on the first character: '"' starts a quoted string; a digit,
/// '.' or '-' starts a number; anything else starts a bare string.
fn parse_atom(state: &mut ParseState) -> ERes<Sexp> {
    let a = match try!(state.peek()) {
        '"' => try!(parse_quoted_string(state)),
        '0' ... '9' | '.' | '-' => try!(parse_number(state)),
        _ => try!(parse_string(state)),
    };
    Ok(Sexp::Atom(a))
}
/// Parse the next expression, skipping leading whitespace.
/// A bare ')' at this level is an "unmatched )" error.
fn parse_sexp(state: &mut ParseState) -> ERes<Sexp> {
    loop {
        let c = try!(state.peek());
        if c == '(' {
            return parse_list(state);
        }
        if c == ')' {
            return err_parse("unmatched )", state);
        }
        if c == ' ' || c == '\t' || c == '\r' || c == '\n' {
            try!(state.next());
        } else {
            return parse_atom(state);
        }
    }
}
/// Parse a complete document. The empty document parses to `Sexp::Empty`;
/// anything else must start with a single expression.
fn parse(data: &str) -> ERes<Sexp> {
    // idiomatic emptiness check (was `data.len() == 0`)
    if data.is_empty() {
        Ok(Sexp::Empty)
    } else {
        let vec: Vec<char> = data.chars().collect();
        let len = vec.len();
        // line is 1-based, column (lpos) is 0-based
        let state = &mut ParseState { pos: 0, line: 1, lpos: 0, vec: vec, len: len };
        parse_sexp(state)
    }
}
/// Read the entire contents of the file `name` into a String.
fn read_file(name: &str) -> Result<String, std::io::Error> {
    let mut contents = String::new();
    let mut file = try!(File::open(name));
    try!(file.read_to_string(&mut contents));
    Ok(contents)
}
pub fn parse_str(s: &str) -> ERes<Sexp> {
parse(s)
}
pub fn parse_file(name: &str) -> ERes<Sexp> {
let s = try!(match read_file(name) {
Ok(s) => Ok(s),
Err(x) => err_io(x),
});
parse(&s[..])
}
#[cfg(test)]
mod tests {
    use super::*;
    // Round-trip check: parsing `s` and printing the result must
    // reproduce `s` byte for byte.
    #[allow(dead_code)]
    fn check_parse(s: &str) {
        let e = parse_str(s).unwrap();
        let t = format!("{}", e);
        assert_eq!(s, t);
    }
    // For should_panic tests: unwrap() panics with the error's Debug text.
    #[allow(dead_code)]
    fn parse_fail(s: &str) {
        parse_str(s).unwrap();
    }
    #[test]
    fn test_empty() { check_parse("") }
    #[test]
    fn test_minimal() { check_parse("()") }
    #[test]
    fn test_string() { check_parse("hello") }
    #[test]
    fn test_qstring() { check_parse("\"hello\"") }
    #[test]
    fn test_number() { check_parse("1.3") }
    // whole-valued floats must keep their ".0" when printed back
    #[test]
    fn test_float_vs_int() { check_parse("2.0") }
    #[test]
    fn test_double() { check_parse("(())") }
    #[test]
    fn test_br_string() { check_parse("(world)") }
    #[test]
    fn test_br_qstring() { check_parse("(\"world\")") }
    #[test]
    fn test_br_int() { check_parse("(42)") }
    #[test]
    fn test_br_float() { check_parse("(12.7)") }
    // parentheses inside a quoted string are not structure
    #[test]
    fn test_br_qbrstring() { check_parse("(\"(()\")") }
    #[test]
    #[should_panic(expected="Parse Error 1:1: end of document reached")]
    fn test_invalid1() { parse_fail("(") }
    #[test]
    #[should_panic(expected="Parse Error 1:0: unmatched )")]
    fn test_invalid2() { parse_fail(")") }
    #[test]
    #[should_panic(expected="Parse Error 1:6: end of document reached")]
    fn test_invalid3() { parse_fail("\"hello") }
    // large mixed expression exercising lists, bare strings, quoted
    // strings, ints and floats together
    #[test]
    fn test_complex() { check_parse("(module SWITCH_3W_SIDE_MMP221-R (layer F.Cu) (descr \"\") (pad 1 thru_hole rect (size 1.2 1.2) (at -2.5 -1.6 0) (layers *.Cu *.Mask) (drill 0.8)) (pad 2 thru_hole rect (size 1.2 1.2) (at 0.0 -1.6 0) (layers *.Cu *.Mask) (drill 0.8)) (pad 3 thru_hole rect (size 1.2 1.2) (at 2.5 -1.6 0) (layers *.Cu *.Mask) (drill 0.8)) (pad 5 thru_hole rect (size 1.2 1.2) (at 0.0 1.6 0) (layers *.Cu *.Mask) (drill 0.8)) (pad 6 thru_hole rect (size 1.2 1.2) (at -2.5 1.6 0) (layers *.Cu *.Mask) (drill 0.8)) (pad 4 thru_hole rect (size 1.2 1.2) (at 2.5 1.6 0) (layers *.Cu *.Mask) (drill 0.8)) (fp_line (start -4.5 -1.75) (end 4.5 -1.75) (layer F.SilkS) (width 0.127)) (fp_line (start 4.5 -1.75) (end 4.5 1.75) (layer F.SilkS) (width 0.127)) (fp_line (start 4.5 1.75) (end -4.5 1.75) (layer F.SilkS) (width 0.127)) (fp_line (start -4.5 1.75) (end -4.5 -1.75) (layer F.SilkS) (width 0.127)))") }
}
|
use std::collections::VecDeque;
use std::usize;
use std::mem;
use std::ptr;
use std::cmp;
/// ringbuffer operations on slices
pub trait SliceRing<T> {
    /// append all `values` at the back of the ring
    fn push_many_back(&mut self, values: &[T]);
    /// remove up to `count` values from the front; returns how many were removed
    fn drop_many_front(&mut self, count: usize) -> usize;
    /// copy the front of the ring into `output` without consuming it;
    /// returns how many values were written
    fn read_many_front(&self, output: &mut [T]) -> usize;
}
/// Straightforward reference implementation on top of `VecDeque`.
impl<T: Copy> SliceRing<T> for VecDeque<T> {
    fn push_many_back(&mut self, values: &[T]) {
        // element-wise push; the deque grows (reallocates) as needed
        for &value in values {
            self.push_back(value);
        }
    }
    fn drop_many_front(&mut self, count: usize) -> usize {
        let removed = std::cmp::min(self.len(), count);
        for _ in 0..removed {
            self.pop_front();
        }
        removed
    }
    fn read_many_front(&self, output: &mut [T]) -> usize {
        let copied = std::cmp::min(self.len(), output.len());
        for (i, slot) in output.iter_mut().take(copied).enumerate() {
            *slot = self[i];
        }
        copied
    }
}
// Default usable capacity for `OptimizedSliceRing::new()`; the backing
// buffer is rounded up to a power of two, with one slot kept empty.
const INITIAL_CAPACITY: usize = 7; // 2^3 - 1
const MINIMUM_CAPACITY: usize = 1; // 2 - 1
// Zero-sized types occupy no memory, so the ring reports unbounded
// capacity for them (see `cap()`).
const MAXIMUM_ZST_CAPACITY: usize = usize::MAX;
// readable area starts at first_readable and goes until (not including)
// next_writable is one after the last readable
// TODO move this into its own file
// R = first_readable
// W = next_writable
// o = occupied (len)
// . = free
//
// R W
// [o o o o o o o . . . .]
pub struct OptimizedSliceRing<T> {
    /// index into `buf` of the first element that could be read.
    /// only to be incremented.
    pub first_readable: usize,
    /// index into `buf` where the next element could be written
    pub next_writable: usize,
    /// backing storage; indexing uses its *capacity* (a power of two).
    /// NOTE(review): elements are written via raw pointers beyond
    /// `buf.len()`, which is never updated — verify soundness.
    pub buf: Vec<T>,
}
/// Calculate the number of elements left to be read in the buffer.
/// `size` must be a power of two, which makes the mask below equivalent
/// to taking the (wrapped) difference modulo `size`.
#[inline]
fn count(tail: usize, head: usize, size: usize) -> usize {
    let mask = size - 1;
    head.wrapping_sub(tail) & mask
}
/// Reduce `index` modulo `size`. Valid only because `size` is always a
/// power of two, so masking with `size - 1` is the modulo.
#[inline]
fn wrap_index(index: usize, size: usize) -> usize {
    debug_assert!(size.is_power_of_two());
    index & (size - 1)
}
// TODO implement only what's needed below
/// ringbuffer focused on and optimized for operating on slices of values:
/// appending to the back, reading from the front
/// and dropping from the front.
/// which is much faster.
/// TODO call OptimizedSliceRingImpl
impl<T> OptimizedSliceRing<T> {
/// Creates an empty `OptimizedSliceRing`.
pub fn new() -> OptimizedSliceRing<T> {
OptimizedSliceRing::with_capacity(INITIAL_CAPACITY)
}
/// Creates an empty `OptimizedSliceRing` with space for at least `n` elements.
pub fn with_capacity(n: usize) -> OptimizedSliceRing<T> {
// +1 since the ringbuffer always leaves one space empty
let cap = cmp::max(n + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
assert!(cap > n, "capacity overflow");
OptimizedSliceRing {
first_readable: 0,
next_writable: 0,
buf: Vec::with_capacity(cap),
}
}
#[inline]
pub fn cap(&self) -> usize {
if mem::size_of::<T>() == 0 {
// For zero sized types, we are always at maximum capacity
MAXIMUM_ZST_CAPACITY
} else {
self.buf.capacity()
}
}
#[inline]
pub fn capacity(&self) -> usize {
self.cap() - 1
}
#[inline]
pub fn is_continuous(&self) -> bool {
self.first_readable <= self.next_writable
}
#[inline]
pub fn len(&self) -> usize {
count(self.first_readable, self.next_writable, self.cap())
}
/// - 1 because ...
#[inline]
pub fn wrap_add(&self, index: usize, addend: usize) -> usize {
// wrapping_add is a method of std::usize
wrap_index(index.wrapping_add(addend), self.cap())
}
/// Copies a contiguous block of memory len long from src to dst
/// we can use this if we own the data and move it around
/// instead of copying it.
/// the data still exists in only in one place.
/// it is just moved to another place.
#[inline]
unsafe fn copy_nonoverlapping(&mut self, src: usize, dst: usize, len: usize) {
debug_assert!(dst + len <= self.cap(), "dst={} src={} len={} cap={}", dst, src, len,
self.cap());
debug_assert!(src + len <= self.cap(), "dst={} src={} len={} cap={}", dst, src, len,
self.cap());
}
/// this is the most complex part
/// Frobs the head and tail sections around to handle the fact that we
/// just reallocated. Unsafe because it trusts old_cap.
#[inline]
pub unsafe fn handle_cap_increase(&mut self, old_cap: usize) {
// move the shortest contiguous section of the ring buffer
// R = first_readable
// W = next_writable
// o = occupied
// . = free
// c = copied by `handle_cap_increase`
// continuous !
// before cap increase:
// R W
// [o o o o o o o . ]
// after cap increase:
// R W
// [o o o o o o o . . . . . . . . . ]
// after handle_cap_increase:
// R W
// [o o o o o o o . . . . . . . . . ]
if self.is_continuous() { return }
// shortest section at front:
// before cap increase:
// W R
// [o o . o o o o o ]
// after cap increase:
// W R
// [c c . o o o o o . . . . . . . . ]
// after handle_cap_increase:
// R W
// [. . . o o o o o c c . . . . . . ]
if self.next_writable < old_cap - self.first_readable {
let next_writable = self.next_writable;
let copy_src = 0;
// after the previous
let copy_dst = old_cap;
// everything before next_writable
let copy_len = next_writable;
self.copy_nonoverlapping(copy_src, copy_dst, copy_len);
self.next_writable += old_cap;
debug_assert!(self.next_writable > self.first_readable);
debug_assert!(self.next_writable < self.cap());
debug_assert!(self.first_readable < self.cap());
debug_assert!(self.cap().is_power_of_two());
return
}
// shortest section at tail:
// before cap increase:
// W R
// [o o o o o . o o ]
// after cap increase:
// W R
// [o o o o o . c c . . . . . . . . ]
// after handle_cap_increase:
// W R
// [o o o o o . . . . . . . . . c c ]
let new_cap = self.cap();
let new_first_readable =
new_cap - (old_cap - self.first_readable);
let copy_src = self.first_readable;
let copy_dst = new_first_readable;
let copy_len = old_cap - self.first_readable;
self.copy_nonoverlapping(copy_src, copy_dst, copy_len);
self.first_readable = new_first_readable;
debug_assert!(self.next_writable < self.first_readable);
debug_assert!(self.next_writable < self.cap());
debug_assert!(self.first_readable < self.cap());
debug_assert!(self.cap().is_power_of_two());
}
}
// TODO test with zero sized types and max length
impl<T: Copy> SliceRing<T> for OptimizedSliceRing<T> {
    /// increases `self.len()` by `count`.
    fn push_many_back(&mut self, input: &[T]) {
        // make enough space
        let additional = input.len();
        // NOTE(review): `self.buf.len()` is the Vec's *length*, which this
        // type never updates (elements are written with raw pointers into
        // the Vec's spare capacity); presumably `self.len() + additional`
        // was intended here — TODO confirm
        let required = self.buf.len() + additional;
        let cap = self.cap();
        if cap < required {
            self.buf.reserve(required.next_power_of_two());
            unsafe {
                self.handle_cap_increase(cap);
            }
        }
        for i in 0..additional {
            // Unsafe code so this can be optimised to a memcpy (or something
            // similarly fast) when T is Copy. LLVM is easily confused, so any
            // extra operations during the loop can prevent this optimisation.
            // TODO benchmark a T (struct) that is Copy
            // vs a T (struct) that is Clone
            // TODO maybe replace by two loops that
            // each copy consecutive elements
            unsafe {
                let dst_index = self.wrap_add(self.next_writable, i);
                let dst = self.buf.get_unchecked_mut(dst_index);
                let src = input.get_unchecked(i).clone();
                ptr::write(dst, src);
            }
        }
        self.next_writable = self.wrap_add(self.next_writable, additional);
    }
    /// reduces `self.len()` by `count`.
    fn drop_many_front(&mut self, count: usize) -> usize {
        // TODO improve name of real_count
        // dropping is O(1): only the read index advances
        let real_count = std::cmp::min(self.len(), count);
        self.first_readable = self.wrap_add(
            self.first_readable, real_count);
        real_count
    }
    /// Copies up to `output.len()` front elements into `output` without
    /// consuming them; returns the number copied.
    fn read_many_front(&self, output: &mut [T]) -> usize {
        let real_count = std::cmp::min(self.len(), output.len());
        for i in 0..real_count {
            // Unsafe code so this can be optimised to a memcpy (or something
            // similarly fast) when T is Copy. LLVM is easily confused, so any
            // extra operations during the loop can prevent this optimisation.
            unsafe {
                let dst = output.get_unchecked_mut(i);
                let src_index = self.wrap_add(self.first_readable, i);
                let src = self.buf.get_unchecked(src_index).clone();
                ptr::write(dst, src);
            }
        }
        real_count
    }
}
// /// for safe and convenient ... fixed window and step size
// /// this is the main thing of this module
// /// two backing buffer types:
// /// one simple for illustration
// /// one optimized for performance
// /// benchmarked against each other
// pub struct SlidingWindow<T, Storage: SliceRing<T>> {
// pub window_size: usize,
// pub step_size: usize,
// pub buf: T
// }
//
// impl<T, Storage: SliceRing<T>> SlidingWindow<Storage> {
// pub fn from_storage(storage: Storage) {
//
// }
// pub fn new_slow(window_size: usize, step_size: usize) {
// SlidingWindow {
// window_size: window_size,
// step_size: step_size,
// // TODO initialize based on window and step size
// buf: VecDeque::<T>::new()
// }
// }
// }
//
// drop `count` elements
// remove `step_size` values from the front of `ringbuffer`
// O(1) instead of O(n)
//
// impl<T> FixedSliceRing<T> {
//
// /// returns the number of values appended
// /// `O(n)` where `n = fill_me.len()`
// pub fn push(&mut self, &[T]) -> usize {
//
// }
//
// /// write into `fill_me` the first `fill_me.len()` values
// /// present in this ring.
// /// `O(n)` where `n = fill_me.len()`
// pub fn peak(&self, fill_me: &[T]) -> usize {
//
// }
//
// /// drop (remove) the first `count` values
// /// present in this ring.
// /// O(1)
// pub fn pop(&mut self, count: usize) -> usize
//
// }
//
// pub fn len(&self) {
//
// }
//
// ///
// pub fn space(&self) {
//
// }
// }
// impl<T: Copy> SlidingWindow<T> {
// pub fn new(window_size: usize, step_size: usize) -> Self {
// assert!(0 < window_size);
// assert!(0 < step_size);
//
// SlidingWindow {
// window_size: window_size,
// step_size: step_size,
// // TODO with_capacity
// ringbuffer: VecDeque::new(),
// }
// }
//
// // TODO how fast is this ?
// // time complexity
// // append `samples`
// pub fn len
//
// pub fn is_full(&self) -> bool {
// self.window_size <= self.ringbuffer.len()
// }
//
// pub fn read_front(&mut self, fill_me: &mut [T]) -> bool {
// if !self.can_fill() { return false; }
// assert_eq!(fill_me.len(), self.window_size);
// for i in 0..self.window_size {
// fill_me[i] = self.ringbuffer[i];
// }
// true
// }
//
// pub fn drop_front(&mut self, count: usize) {
// for _ in 0..count {
// self.buf.pop_front();
// }
// }
//
// // if `self.can_fill()` fills `window` fully with the next
// // `window_size` samples.
// // then makes a step discards `self.step_size` samples.
// // else does nothing.
// // `window.len()` must be equal to `self.window_size`.
// // returns whether `self.can_fill()`.
// pub fn fill_and_step(&mut self, fill_me: &mut [T]) -> bool {
// if !self.fill(fill_me) { return false; }
// self.step();
// true
// }
// }
/// macro containing a test run that is used to test and benchmark
/// different implementations of the `SliceRing` trait
#[macro_export]
macro_rules! test_slice_ring {
    ($new:expr) => {{
        let mut testable = $new;
        // we use debug_assert_eq! here because it is omitted in
        // release mode which we want when benchmarking
        debug_assert_eq!(testable.len(), 0);
        // reading and dropping from an empty ring must be no-ops
        let mut output: Vec<i32> = std::iter::repeat(0).take(1000).collect();
        debug_assert_eq!(testable.read_many_front(&mut output[..]), 0);
        debug_assert_eq!(output, std::iter::repeat(0).take(1000).collect::<Vec<i32>>());
        debug_assert_eq!(testable.drop_many_front(505), 0);
        debug_assert_eq!(testable.len(), 0);
        // fill with 3000 values and read back non-destructively
        let input = (0..3000).collect::<Vec<i32>>();
        testable.push_many_back(&input[..]);
        debug_assert_eq!(testable.len(), 3000);
        debug_assert_eq!(testable.capacity(), 4095);
        let mut output: Vec<i32> = std::iter::repeat(0).take(1000).collect();
        debug_assert_eq!(testable.read_many_front(&mut output[..]), 1000);
        debug_assert_eq!(output, (0..1000).collect::<Vec<i32>>());
        let mut output: Vec<i32> = std::iter::repeat(0).take(200).collect();
        debug_assert_eq!(testable.read_many_front(&mut output[..]), 200);
        debug_assert_eq!(output, (0..200).collect::<Vec<i32>>());
        // drop from the front and verify the window advanced
        debug_assert_eq!(testable.drop_many_front(100), 100);
        debug_assert_eq!(testable.len(), 2900);
        let mut output: Vec<i32> = std::iter::repeat(0).take(1000).collect();
        debug_assert_eq!(testable.read_many_front(&mut output[..]), 1000);
        debug_assert_eq!(output, (100..1100).collect::<Vec<i32>>());
        debug_assert_eq!(testable.drop_many_front(505), 505);
        debug_assert_eq!(testable.len(), 2395);
        // reading into an oversized buffer only fills the readable part
        let mut output: Vec<i32> = std::iter::repeat(0).take(4000).collect();
        debug_assert_eq!(testable.read_many_front(&mut output[..]), 2395);
        debug_assert_eq!(
            output, (605..3000).chain(std::iter::repeat(0).take(1605)).collect::<Vec<i32>>());
        // pushing again wraps around and eventually grows the buffer
        let input = (3000..4000).collect::<Vec<i32>>();
        testable.push_many_back(&input[..]);
        debug_assert_eq!(testable.len(), 3395);
        debug_assert_eq!(testable.capacity(), 4095);
        let input = (5000..7000).collect::<Vec<i32>>();
        testable.push_many_back(&input[..]);
        debug_assert_eq!(testable.len(), 5395);
        debug_assert_eq!(testable.capacity(), 8191);
        // TODO push more
        //
        // TODO drop more than contained
        //
        // TODO little window run
    }};
}
reformat
use std::collections::VecDeque;
use std::usize;
use std::mem;
use std::ptr;
use std::cmp;
/// ringbuffer operations on slices
pub trait SliceRing<T> {
    /// append all `values` at the back of the ring
    fn push_many_back(&mut self, values: &[T]);
    /// remove up to `count` values from the front; returns how many were removed
    fn drop_many_front(&mut self, count: usize) -> usize;
    /// copy the front of the ring into `output` without consuming it;
    /// returns how many values were written
    fn read_many_front(&self, output: &mut [T]) -> usize;
}
/// Straightforward reference implementation on top of `VecDeque`.
impl<T: Copy> SliceRing<T> for VecDeque<T> {
    fn push_many_back(&mut self, values: &[T]) {
        // element-wise push; the deque grows (reallocates) as needed
        for &value in values {
            self.push_back(value);
        }
    }
    fn drop_many_front(&mut self, count: usize) -> usize {
        let removed = std::cmp::min(self.len(), count);
        for _ in 0..removed {
            self.pop_front();
        }
        removed
    }
    fn read_many_front(&self, output: &mut [T]) -> usize {
        let copied = std::cmp::min(self.len(), output.len());
        for (i, slot) in output.iter_mut().take(copied).enumerate() {
            *slot = self[i];
        }
        copied
    }
}
// Default usable capacity for `OptimizedSliceRing::new()`; the backing
// buffer is rounded up to a power of two, with one slot kept empty.
const INITIAL_CAPACITY: usize = 7; // 2^3 - 1
const MINIMUM_CAPACITY: usize = 1; // 2 - 1
// Zero-sized types occupy no memory, so the ring reports unbounded
// capacity for them (see `cap()`).
const MAXIMUM_ZST_CAPACITY: usize = usize::MAX;
// readable area starts at first_readable and goes until (not including)
// next_writable is one after the last readable
// TODO move this into its own file
// R = first_readable
// W = next_writable
// o = occupied (len)
// . = free
//
// R W
// [o o o o o o o . . . .]
pub struct OptimizedSliceRing<T> {
    /// index into `buf` of the first element that could be read.
    /// only to be incremented.
    pub first_readable: usize,
    /// index into `buf` where the next element could be written
    pub next_writable: usize,
    /// backing storage; indexing uses its *capacity* (a power of two).
    /// NOTE(review): elements are written via raw pointers beyond
    /// `buf.len()`, which is never updated — verify soundness.
    pub buf: Vec<T>,
}
/// Calculate the number of elements left to be read in the buffer.
/// `size` must be a power of two, which makes the mask below equivalent
/// to taking the (wrapped) difference modulo `size`.
#[inline]
fn count(tail: usize, head: usize, size: usize) -> usize {
    let mask = size - 1;
    head.wrapping_sub(tail) & mask
}
/// Reduce `index` modulo `size`. Valid only because `size` is always a
/// power of two, so masking with `size - 1` is the modulo.
#[inline]
fn wrap_index(index: usize, size: usize) -> usize {
    debug_assert!(size.is_power_of_two());
    index & (size - 1)
}
// TODO implement only what's needed below
/// ringbuffer focused on and optimized for operating on slices of values:
/// appending to the back, reading from the front
/// and dropping from the front.
/// which is much faster.
/// TODO call OptimizedSliceRingImpl
impl<T> OptimizedSliceRing<T> {
/// Creates an empty `OptimizedSliceRing`.
pub fn new() -> OptimizedSliceRing<T> {
OptimizedSliceRing::with_capacity(INITIAL_CAPACITY)
}
/// Creates an empty `OptimizedSliceRing` with space for at least `n` elements.
pub fn with_capacity(n: usize) -> OptimizedSliceRing<T> {
// +1 since the ringbuffer always leaves one space empty
let cap = cmp::max(n + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
assert!(cap > n, "capacity overflow");
OptimizedSliceRing {
first_readable: 0,
next_writable: 0,
buf: Vec::with_capacity(cap),
}
}
#[inline]
pub fn cap(&self) -> usize {
if mem::size_of::<T>() == 0 {
// For zero sized types, we are always at maximum capacity
MAXIMUM_ZST_CAPACITY
} else {
self.buf.capacity()
}
}
#[inline]
pub fn capacity(&self) -> usize {
self.cap() - 1
}
#[inline]
pub fn is_continuous(&self) -> bool {
self.first_readable <= self.next_writable
}
#[inline]
pub fn len(&self) -> usize {
count(self.first_readable, self.next_writable, self.cap())
}
/// - 1 because ...
#[inline]
pub fn wrap_add(&self, index: usize, addend: usize) -> usize {
// wrapping_add is a method of std::usize
wrap_index(index.wrapping_add(addend), self.cap())
}
/// Copies a contiguous block of memory len long from src to dst
/// we can use this if we own the data and move it around
/// instead of copying it.
/// the data still exists in only in one place.
/// it is just moved to another place.
#[inline]
unsafe fn copy_nonoverlapping(&mut self, src: usize, dst: usize, len: usize) {
debug_assert!(dst + len <= self.cap(), "dst={} src={} len={} cap={}", dst, src, len,
self.cap());
debug_assert!(src + len <= self.cap(), "dst={} src={} len={} cap={}", dst, src, len,
self.cap());
}
/// this is the most complex part
/// Frobs the head and tail sections around to handle the fact that we
/// just reallocated. Unsafe because it trusts old_cap.
#[inline]
pub unsafe fn handle_cap_increase(&mut self, old_cap: usize) {
// move the shortest contiguous section of the ring buffer
// R = first_readable
// W = next_writable
// o = occupied
// . = free
// c = copied by `handle_cap_increase`
// continuous !
// before cap increase:
// R W
// [o o o o o o o . ]
// after cap increase:
// R W
// [o o o o o o o . . . . . . . . . ]
// after handle_cap_increase:
// R W
// [o o o o o o o . . . . . . . . . ]
if self.is_continuous() {
return
}
// shortest section at front:
// before cap increase:
// W R
// [o o . o o o o o ]
// after cap increase:
// W R
// [c c . o o o o o . . . . . . . . ]
// after handle_cap_increase:
// R W
// [. . . o o o o o c c . . . . . . ]
if self.next_writable < old_cap - self.first_readable {
let next_writable = self.next_writable;
let copy_src = 0;
// after the previous
let copy_dst = old_cap;
// everything before next_writable
let copy_len = next_writable;
self.copy_nonoverlapping(copy_src, copy_dst, copy_len);
self.next_writable += old_cap;
debug_assert!(self.next_writable > self.first_readable);
debug_assert!(self.next_writable < self.cap());
debug_assert!(self.first_readable < self.cap());
debug_assert!(self.cap().is_power_of_two());
return
}
// shortest section at tail:
// before cap increase:
// W R
// [o o o o o . o o ]
// after cap increase:
// W R
// [o o o o o . c c . . . . . . . . ]
// after handle_cap_increase:
// W R
// [o o o o o . . . . . . . . . c c ]
let new_cap = self.cap();
let new_first_readable =
new_cap - (old_cap - self.first_readable);
let copy_src = self.first_readable;
let copy_dst = new_first_readable;
let copy_len = old_cap - self.first_readable;
self.copy_nonoverlapping(copy_src, copy_dst, copy_len);
self.first_readable = new_first_readable;
debug_assert!(self.next_writable < self.first_readable);
debug_assert!(self.next_writable < self.cap());
debug_assert!(self.first_readable < self.cap());
debug_assert!(self.cap().is_power_of_two());
}
}
// TODO test with zero sized types and max length
impl<T: Copy> SliceRing<T> for OptimizedSliceRing<T> {
    /// Appends all elements of `input` to the back of the ring.
    /// increases `self.len()` by `input.len()`.
    fn push_many_back(&mut self, input: &[T]) {
        // make enough space
        let additional = input.len();
        // NOTE(review): `self.buf.len()` is the Vec length, which appears to
        // remain 0 here because elements are written with `ptr::write` and
        // `set_len` is never called — confirm `required` is what's intended.
        let required = self.buf.len() + additional;
        let cap = self.cap();
        if cap < required {
            // grow the backing buffer, then un-wrap the ring so the
            // readable region stays valid inside the larger buffer
            self.buf.reserve(required.next_power_of_two());
            unsafe {
                self.handle_cap_increase(cap);
            }
        }
        for i in 0..additional {
            // Unsafe code so this can be optimised to a memcpy (or something
            // similarly fast) when T is Copy. LLVM is easily confused, so any
            // extra operations during the loop can prevent this optimisation.
            // TODO benchmark a T (struct) that is Copy
            // vs a T (struct) that is Clone
            // TODO maybe replace by two loops that
            // each copy consecutive elements
            unsafe {
                let dst_index = self.wrap_add(self.next_writable, i);
                let dst = self.buf.get_unchecked_mut(dst_index);
                let src = input.get_unchecked(i).clone();
                ptr::write(dst, src);
            }
        }
        // commit the writes by advancing the write index
        self.next_writable = self.wrap_add(self.next_writable, additional);
    }
    /// Removes up to `count` elements from the front of the ring and
    /// returns how many were actually removed.
    /// reduces `self.len()` by the returned number.
    fn drop_many_front(&mut self, count: usize) -> usize {
        // TODO improve name of real_count
        // can never drop more elements than are stored
        let real_count = std::cmp::min(self.len(), count);
        // O(1): only the read index moves, no elements are touched
        self.first_readable = self.wrap_add(
            self.first_readable, real_count);
        real_count
    }
    /// Copies up to `output.len()` elements from the front of the ring
    /// into `output` without consuming them.
    /// Returns how many elements were copied.
    fn read_many_front(&self, output: &mut [T]) -> usize {
        let real_count = std::cmp::min(self.len(), output.len());
        for i in 0..real_count {
            // Unsafe code so this can be optimised to a memcpy (or something
            // similarly fast) when T is Copy. LLVM is easily confused, so any
            // extra operations during the loop can prevent this optimisation.
            unsafe {
                let dst = output.get_unchecked_mut(i);
                let src_index = self.wrap_add(self.first_readable, i);
                let src = self.buf.get_unchecked(src_index).clone();
                ptr::write(dst, src);
            }
        }
        real_count
    }
}
// /// for safe and convenient ... fixed window and step size
// /// this is the main thing of this module
// /// two backing buffer types:
// /// one simple for illustration
// /// one optimized for performance
// /// benchmarked against each other
// pub struct SlidingWindow<T, Storage: SliceRing<T>> {
// pub window_size: usize,
// pub step_size: usize,
// pub buf: T
// }
//
// impl<T, Storage: SliceRing<T>> SlidingWindow<Storage> {
// pub fn from_storage(storage: Storage) {
//
// }
// pub fn new_slow(window_size: usize, step_size: usize) {
// SlidingWindow {
// window_size: window_size,
// step_size: step_size,
// // TODO initialize based on window and step size
// buf: VecDeque::<T>::new()
// }
// }
// }
//
// drop `count` elements
// remove `step_size` values from the front of `ringbuffer`
// O(1) instead of O(n)
//
// impl<T> FixedSliceRing<T> {
//
// /// returns the number of values appended
// /// `O(n)` where `n = fill_me.len()`
// pub fn push(&mut self, &[T]) -> usize {
//
// }
//
// /// write into `fill_me` the first `fill_me.len()` values
// /// present in this ring.
// /// `O(n)` where `n = fill_me.len()`
// pub fn peak(&self, fill_me: &[T]) -> usize {
//
// }
//
// /// drop (remove) the first `count` values
// /// present in this ring.
// /// O(1)
// pub fn pop(&mut self, count: usize) -> usize
//
// }
//
// pub fn len(&self) {
//
// }
//
// ///
// pub fn space(&self) {
//
// }
// }
// impl<T: Copy> SlidingWindow<T> {
// pub fn new(window_size: usize, step_size: usize) -> Self {
// assert!(0 < window_size);
// assert!(0 < step_size);
//
// SlidingWindow {
// window_size: window_size,
// step_size: step_size,
// // TODO with_capacity
// ringbuffer: VecDeque::new(),
// }
// }
//
// // TODO how fast is this ?
// // time complexity
// // append `samples`
// pub fn len
//
// pub fn is_full(&self) -> bool {
// self.window_size <= self.ringbuffer.len()
// }
//
// pub fn read_front(&mut self, fill_me: &mut [T]) -> bool {
// if !self.can_fill() { return false; }
// assert_eq!(fill_me.len(), self.window_size);
// for i in 0..self.window_size {
// fill_me[i] = self.ringbuffer[i];
// }
// true
// }
//
// pub fn drop_front(&mut self, count: usize) {
// for _ in 0..count {
// self.buf.pop_front();
// }
// }
//
// // if `self.can_fill()` fills `window` fully with the next
// // `window_size` samples.
// // then makes a step discards `self.step_size` samples.
// // else does nothing.
// // `window.len()` must be equal to `self.window_size`.
// // returns whether `self.can_fill()`.
// pub fn fill_and_step(&mut self, fill_me: &mut [T]) -> bool {
// if !self.fill(fill_me) { return false; }
// self.step();
// true
// }
// }
/// Macro containing a fixed test run that is used to test and benchmark
/// different implementations of the `SliceRing` trait against each other.
#[macro_export]
macro_rules! test_slice_ring {
    ($new:expr) => {{
        let mut ring = $new;
        // debug_assert_eq! is compiled out in release mode, which is
        // exactly what we want when this macro is used for benchmarking
        debug_assert_eq!(ring.len(), 0);
        // reading or dropping from an empty ring yields nothing
        // and leaves the output buffer untouched
        let mut read_buf: Vec<i32> = vec![0; 1000];
        debug_assert_eq!(ring.read_many_front(&mut read_buf[..]), 0);
        debug_assert_eq!(read_buf, vec![0i32; 1000]);
        debug_assert_eq!(ring.drop_many_front(505), 0);
        debug_assert_eq!(ring.len(), 0);
        // fill the ring with 3000 elements
        let values = (0..3000).collect::<Vec<i32>>();
        ring.push_many_back(&values[..]);
        debug_assert_eq!(ring.len(), 3000);
        debug_assert_eq!(ring.capacity(), 4095);
        // reads start at the front and do not consume
        let mut read_buf: Vec<i32> = vec![0; 1000];
        debug_assert_eq!(ring.read_many_front(&mut read_buf[..]), 1000);
        debug_assert_eq!(read_buf, (0..1000).collect::<Vec<i32>>());
        let mut read_buf: Vec<i32> = vec![0; 200];
        debug_assert_eq!(ring.read_many_front(&mut read_buf[..]), 200);
        debug_assert_eq!(read_buf, (0..200).collect::<Vec<i32>>());
        // drops consume from the front
        debug_assert_eq!(ring.drop_many_front(100), 100);
        debug_assert_eq!(ring.len(), 2900);
        let mut read_buf: Vec<i32> = vec![0; 1000];
        debug_assert_eq!(ring.read_many_front(&mut read_buf[..]), 1000);
        debug_assert_eq!(read_buf, (100..1100).collect::<Vec<i32>>());
        debug_assert_eq!(ring.drop_many_front(505), 505);
        debug_assert_eq!(ring.len(), 2395);
        // reading more than is available returns only what is there
        let mut read_buf: Vec<i32> = vec![0; 4000];
        debug_assert_eq!(ring.read_many_front(&mut read_buf[..]), 2395);
        debug_assert_eq!(
            read_buf, (605..3000).chain(std::iter::repeat(0).take(1605)).collect::<Vec<i32>>());
        // pushing within capacity does not grow the buffer
        let values = (3000..4000).collect::<Vec<i32>>();
        ring.push_many_back(&values[..]);
        debug_assert_eq!(ring.len(), 3395);
        debug_assert_eq!(ring.capacity(), 4095);
        // pushing past capacity grows to the next power of two (minus one slot)
        let values = (5000..7000).collect::<Vec<i32>>();
        ring.push_many_back(&values[..]);
        debug_assert_eq!(ring.len(), 5395);
        debug_assert_eq!(ring.capacity(), 8191);
        // TODO push more
        // TODO drop more than contained
        // TODO little window run
    }};
}
|
//! Solvent is a dependency resolver library written in rust.
//!
//! Solvent helps you to resolve dependency orderings by building up a dependency
//! graph and then resolving the dependences of some target node in an order such
//! that each output depends only upon the previous outputs.
//!
//! It is currently quite simple, but is still useful.
//!
//! # Example
//!
//! ```rust
//! extern crate solvent;
//!
//! use solvent::DepGraph;
//!
//! fn main() {
//! // Create a new empty DepGraph. Must be `mut` or else it cannot
//! // be used by the rest of the library.
//! let mut depgraph: DepGraph = DepGraph::new();
//!
//! // You can register a dependency like this. Solvent will
//! // automatically create nodes for any term it has not seen before.
//! // This means 'b' depends on 'd'
//! depgraph.register_dependency("b","d");
//!
//! // You can also register multiple dependencies at once
//! depgraph.register_dependencies("a",&["b","c","d"]);
//! depgraph.register_dependencies("c",&["e"]);
//!
//! // Iterate through each dependency of "a". The dependencies will be
//! // returned in an order such that each output only depends on the
//! // previous outputs (or nothing). The target itself will be output
//! // last.
//! for node in depgraph.dependencies_of("a") {
//! print!("{} ", node.unwrap());
//! }
//! }
//! ```
//!
//! The above will output: `d b e c a` or `e c d b a` or some other valid
//! dependency order.
//!
//! The algorithm is not deterministic, and may give a different answer each
//! time it is run. Beware.
//!
//! The iterator dependencies_of() returns an Option<Result<String,SolventError>>.
//! The for loop handles the Option part for you, but you may want to check the
//! result for SolventErrors. Once an error is returned, all subsequent calls to
//! the iterator next() will yield None.
//!
//! You can also mark some elements as already satisfied, and the iterator
//! will take that into account:
//!
//! ```ignore
//! depgraph.mark_as_satisfied(["e","c"]);
//! ```
//!
//! Dependency cycles are detected and will return SolventError::CycleDetected.
#![crate_name = "solvent"]
#![crate_type = "lib"]
// Required for log and rustdoc:
#![feature(phase)]
#![feature(associated_types)]
#[phase(plugin, link)]
extern crate log;
use std::collections::{HashMap,HashSet};
use std::collections::hash_map::Entry;
use std::iter::{Iterator};
use std::borrow::ToOwned;
/// This is the dependency graph.
#[derive(Clone)]
pub struct DepGraph {
/// List of dependencies. Key is the element, values are the
/// other elements that the key element depends upon.
pub dependencies: HashMap<String,HashSet<String>>,
/// Nodes already satisfied. dependencies_of() will prune
/// dependency searches at these nodes, and not output nodes
/// registered here.
pub satisfied: HashSet<String>,
}
#[derive(Copy,Show,PartialEq)]
pub enum SolventError {
CycleDetected,
// TODO once we implement conflicts: Conflict(String)
}
/// This iterates through the dependencies of the DepGraph's target
pub struct DepGraphIterator<'a> {
depgraph: &'a DepGraph,
// Target we are trying to satisfy
target: String,
// Elements already satisfied during this iterator's walk
satisfied: HashSet<String>,
// Current path, for cycle detection
curpath: HashSet<String>,
// Halted. Used so that it can return None after an Err is returned.
halted: bool,
}
impl DepGraph {
    /// Create an empty DepGraph.
    pub fn new() -> DepGraph
    {
        DepGraph {
            dependencies: HashMap::new(),
            satisfied: HashSet::new(),
        }
    }
    /// Add a dependency to a DepGraph. The node does not need
    /// to pre-exist, nor do the dependency nodes. But if the
    /// node does pre-exist, the depends_on will be added to its
    /// existing dependency list.
    // NOTE(review): `entry()` is given `&String::from_str(node)` — a
    // borrow of a temporary. On modern Rust, `HashMap::entry` takes the
    // key by value, so this only builds against the old pre-1.0 std API
    // this file targets (`#![feature(phase)]`) — confirm the toolchain.
    pub fn register_dependency<'a>( &mut self,
                                    node: &'a str,
                                    depends_on: &'a str )
    {
        match self.dependencies.entry( &String::from_str(node) ) {
            // node seen for the first time: create its dependency set
            Entry::Vacant(entry) => {
                let mut deps = HashSet::with_capacity(1);
                deps.insert( String::from_str(depends_on) );
                entry.insert( deps );
            },
            // node already known: extend its existing set
            Entry::Occupied(mut entry) => {
                (*entry.get_mut()).insert(String::from_str(depends_on));
            },
        }
    }
    /// Add multiple dependencies of one node to a DepGraph. The
    /// node does not need to pre-exist, nor do the dependency elements.
    /// But if the node does pre-exist, the depends_on will be added
    /// to its existing dependency list.
    pub fn register_dependencies<'a>( &mut self,
                                      node: &'a str,
                                      depends_on: &'a[&'a str] )
    {
        match self.dependencies.entry( &String::from_str(node) ) {
            Entry::Vacant(entry) => {
                let mut deps = HashSet::with_capacity( depends_on.len() );
                for s in depends_on.iter() {
                    deps.insert( String::from_str(*s) );
                }
                entry.insert( deps );
            },
            Entry::Occupied(mut entry) => {
                for s in depends_on.iter() {
                    (*entry.get_mut()).insert( String::from_str(*s) );
                }
            },
        }
    }
    /// This marks a node as satisfied. Iterators will not output
    /// such nodes.
    pub fn mark_as_satisfied<'a>( &mut self,
                                  nodes: &'a[&'a str] )
    {
        for node in nodes.iter() {
            self.satisfied.insert(String::from_str(*node));
        }
    }
    /// Get an iterator to iterate through the dependencies of
    /// the target node. The iterator starts with a snapshot of
    /// the graph's currently-satisfied set.
    pub fn dependencies_of<'a>(&'a self, target: &str) -> DepGraphIterator<'a>
    {
        // TODO: iterator's satisfied could start empty, and all checks
        // could separately check depgraph's and iterator's. That
        // would avoid the copy.
        DepGraphIterator {
            depgraph: self,
            target: target.to_owned(),
            // cloned so that iterating never mutates the graph itself
            satisfied: self.satisfied.clone(),
            curpath: HashSet::new(),
            halted: false,
        }
    }
}
impl<'a> DepGraphIterator<'a> {
    /// Walks depth-first from `node` toward a node whose dependencies
    /// are all satisfied, and returns that node (possibly `node` itself).
    /// Returns `SolventError::CycleDetected` if `node` is already on the
    /// current walk path.
    fn get_next_dependency(&mut self, node: &String) -> Result<String,SolventError>
    {
        if self.curpath.contains(node) {
            return Err(SolventError::CycleDetected);
        }
        self.curpath.insert(node.clone());
        let deplist = match self.depgraph.dependencies.get(node) {
            // node has no registered dependencies: it is the next output
            None => return Ok(node.clone()),
            Some(deplist) => deplist.clone() // ouch — clones the whole set to satisfy the borrow checker
        };
        for n in deplist.iter() {
            // Prune satisfied nodes
            if self.satisfied.contains(n) {
                continue;
            }
            // recurse into the first unsatisfied dependency found
            return self.get_next_dependency(n);
        }
        // all of this node's dependencies are satisfied
        Ok(node.clone())
    }
}
impl<'a> Iterator for DepGraphIterator<'a> {
    type Item = Result<String,SolventError>;
    /// Get next dependency. Returns None when finished. If
    /// Some(Err(SolventError)) occurs, all subsequent calls will
    /// return None.
    fn next(&mut self) -> Option< Result<String,SolventError> >
    {
        if self.halted {
            return None;
        }
        let node = self.target.clone();
        // once the target itself is satisfied, the walk is complete
        if self.satisfied.contains(&node) {
            return None;
        }
        // each call starts a fresh walk, so reset the cycle-detection path
        self.curpath.clear();
        let next = match self.get_next_dependency(&node) {
            Ok(d) => d,
            Err(e) => {
                // report the error once, then yield None forever
                self.halted = true;
                return Some(Err(e));
            }
        };
        // the emitted node counts as satisfied for subsequent walks
        self.satisfied.insert(next.clone());
        Some(Ok(next))
    }
}
#[test]
fn solvent_test_branching() {
let mut depgraph: DepGraph = DepGraph::new();
depgraph.register_dependencies("a",&["b","c","d"]);
depgraph.register_dependency("b","d");
depgraph.register_dependencies("c",&["e","m","g"]);
depgraph.register_dependency("e","f");
depgraph.register_dependency("g","h");
depgraph.register_dependency("h","i");
depgraph.register_dependencies("i",&["j","k"]);
depgraph.register_dependencies("k",&["l","m"]);
depgraph.register_dependency("m","n");
let mut results: Vec<String> = Vec::new();
for node in depgraph.dependencies_of("a") {
// detect infinite looping bugs
assert!(results.len() < 30);
let n = node.unwrap();
// Check that all of that nodes dependencies have already been output
let deps: Option<&HashSet<String>> = depgraph.dependencies.get(&n);
if deps.is_some() {
for dep in deps.unwrap().iter() {
assert!( results.contains(dep) );
}
}
results.push(n.clone());
}
// Be sure we actually output enough stuff
assert!(results.len() == 14);
// Be sure each output is unique
for result in results.iter() {
let mut count = 0u;
for result2 in results.iter() {
if result == result2 { count = count + 1; }
}
assert!(count == 1);
}
}
#[test]
fn solvent_test_updating_dependencies() {
let mut depgraph: DepGraph = DepGraph::new();
depgraph.register_dependencies("a",&["b","c"]);
depgraph.register_dependency("a","d");
assert!(depgraph.dependencies.get("a").unwrap().contains("b"));
assert!(depgraph.dependencies.get("a").unwrap().contains("c"));
assert!(depgraph.dependencies.get("a").unwrap().contains("d"));
}
#[test]
fn solvent_test_circular() {
let mut depgraph: DepGraph = DepGraph::new();
depgraph.register_dependency("a","b");
depgraph.register_dependency("b","c");
depgraph.register_dependency("c","a");
for node in depgraph.dependencies_of("a") {
assert!(node.is_err());
assert!(node.unwrap_err() == SolventError::CycleDetected);
}
}
#[test]
fn solvent_test_satisfied_stoppage() {
let mut depgraph: DepGraph = DepGraph::new();
depgraph.register_dependencies("superconn", &[]);
depgraph.register_dependencies("owneruser", &["superconn"]);
depgraph.register_dependencies("appuser", &["superconn"]);
depgraph.register_dependencies("database", &["owneruser"]);
depgraph.register_dependencies("ownerconn", &["database","owneruser"]);
depgraph.register_dependencies("adminconn", &["database"]);
depgraph.register_dependencies("extensions", &["database","adminconn"]);
depgraph.register_dependencies("schema_table", &["database","ownerconn"]);
depgraph.register_dependencies("schemas", &["ownerconn","extensions","schema_table","appuser"]);
depgraph.register_dependencies("appconn", &["database","appuser","schemas"]);
depgraph.mark_as_satisfied(&["owneruser","appuser"]);
let mut results: Vec<String> = Vec::new();
for node in depgraph.dependencies_of("appconn") {
assert!(results.len() < 30);
results.push(node.unwrap());
}
// Be sure we did not depend on these
assert!( !results.contains(&String::from_str("appuser")) );
assert!( !results.contains(&String::from_str("owneruser")) );
assert!( !results.contains(&String::from_str("superconn")) );
// Be sure we actually output enough stuff
assert!(results.len() == 7);
// Be sure each output is unique
for result in results.iter() {
let mut count = 0u;
for result2 in results.iter() {
if result == result2 { count = count + 1; }
}
assert!(count == 1);
}
}
upstream rust: compiler directive changes
//! Solvent is a dependency resolver library written in rust.
//!
//! Solvent helps you to resolve dependency orderings by building up a dependency
//! graph and then resolving the dependences of some target node in an order such
//! that each output depends only upon the previous outputs.
//!
//! It is currently quite simple, but is still useful.
//!
//! # Example
//!
//! ```rust
//! extern crate solvent;
//!
//! use solvent::DepGraph;
//!
//! fn main() {
//! // Create a new empty DepGraph. Must be `mut` or else it cannot
//! // be used by the rest of the library.
//! let mut depgraph: DepGraph = DepGraph::new();
//!
//! // You can register a dependency like this. Solvent will
//! // automatically create nodes for any term it has not seen before.
//! // This means 'b' depends on 'd'
//! depgraph.register_dependency("b","d");
//!
//! // You can also register multiple dependencies at once
//! depgraph.register_dependencies("a",&["b","c","d"]);
//! depgraph.register_dependencies("c",&["e"]);
//!
//! // Iterate through each dependency of "a". The dependencies will be
//! // returned in an order such that each output only depends on the
//! // previous outputs (or nothing). The target itself will be output
//! // last.
//! for node in depgraph.dependencies_of("a") {
//! print!("{} ", node.unwrap());
//! }
//! }
//! ```
//!
//! The above will output: `d b e c a` or `e c d b a` or some other valid
//! dependency order.
//!
//! The algorithm is not deterministic, and may give a different answer each
//! time it is run. Beware.
//!
//! The iterator dependencies_of() returns an Option<Result<String,SolventError>>.
//! The for loop handles the Option part for you, but you may want to check the
//! result for SolventErrors. Once an error is returned, all subsequent calls to
//! the iterator next() will yield None.
//!
//! You can also mark some elements as already satisfied, and the iterator
//! will take that into account:
//!
//! ```ignore
//! depgraph.mark_as_satisfied(["e","c"]);
//! ```
//!
//! Dependency cycles are detected and will return SolventError::CycleDetected.
#![crate_name = "solvent"]
#![crate_type = "lib"]
#[macro_use] extern crate log;
use std::collections::{HashMap,HashSet};
use std::collections::hash_map::Entry;
use std::iter::{Iterator};
use std::borrow::ToOwned;
/// This is the dependency graph.
/// Cloning deep-copies both the dependency map and the satisfied set.
#[derive(Clone)]
pub struct DepGraph {
    /// List of dependencies. Key is the element, values are the
    /// other elements that the key element depends upon.
    pub dependencies: HashMap<String,HashSet<String>>,
    /// Nodes already satisfied. dependencies_of() will prune
    /// dependency searches at these nodes, and not output nodes
    /// registered here.
    pub satisfied: HashSet<String>,
}
/// Errors the dependency resolver can report.
// NOTE(review): `Show` is the pre-1.0 name of today's `Debug` trait.
#[derive(Copy,Show,PartialEq)]
pub enum SolventError {
    /// A dependency cycle was detected during a walk.
    CycleDetected,
    // TODO once we implement conflicts: Conflict(String)
}
/// This iterates through the dependencies of the DepGraph's target.
/// Yields `Result<String, SolventError>` items; after the first `Err`
/// it yields `None` forever.
pub struct DepGraphIterator<'a> {
    // the graph being walked (read-only borrow)
    depgraph: &'a DepGraph,
    // Target we are trying to satisfy
    target: String,
    // Elements already satisfied during this iterator's walk
    // (seeded from the graph's own satisfied set)
    satisfied: HashSet<String>,
    // Current path, for cycle detection
    curpath: HashSet<String>,
    // Halted. Used so that it can return None after an Err is returned.
    halted: bool,
}
impl DepGraph {
    /// Create an empty DepGraph.
    pub fn new() -> DepGraph
    {
        DepGraph {
            dependencies: HashMap::new(),
            satisfied: HashSet::new(),
        }
    }
    /// Add a dependency to a DepGraph. The node does not need
    /// to pre-exist, nor do the dependency nodes. But if the
    /// node does pre-exist, the depends_on will be added to its
    /// existing dependency list.
    // NOTE(review): `entry()` is given `&String::from_str(node)` — a
    // borrow of a temporary. On modern Rust, `HashMap::entry` takes the
    // key by value, so this only builds against the old std API this
    // file targets — confirm the intended toolchain.
    pub fn register_dependency<'a>( &mut self,
                                    node: &'a str,
                                    depends_on: &'a str )
    {
        match self.dependencies.entry( &String::from_str(node) ) {
            // node seen for the first time: create its dependency set
            Entry::Vacant(entry) => {
                let mut deps = HashSet::with_capacity(1);
                deps.insert( String::from_str(depends_on) );
                entry.insert( deps );
            },
            // node already known: extend its existing set
            Entry::Occupied(mut entry) => {
                (*entry.get_mut()).insert(String::from_str(depends_on));
            },
        }
    }
    /// Add multiple dependencies of one node to a DepGraph. The
    /// node does not need to pre-exist, nor do the dependency elements.
    /// But if the node does pre-exist, the depends_on will be added
    /// to its existing dependency list.
    pub fn register_dependencies<'a>( &mut self,
                                      node: &'a str,
                                      depends_on: &'a[&'a str] )
    {
        match self.dependencies.entry( &String::from_str(node) ) {
            Entry::Vacant(entry) => {
                let mut deps = HashSet::with_capacity( depends_on.len() );
                for s in depends_on.iter() {
                    deps.insert( String::from_str(*s) );
                }
                entry.insert( deps );
            },
            Entry::Occupied(mut entry) => {
                for s in depends_on.iter() {
                    (*entry.get_mut()).insert( String::from_str(*s) );
                }
            },
        }
    }
    /// This marks a node as satisfied. Iterators will not output
    /// such nodes.
    pub fn mark_as_satisfied<'a>( &mut self,
                                  nodes: &'a[&'a str] )
    {
        for node in nodes.iter() {
            self.satisfied.insert(String::from_str(*node));
        }
    }
    /// Get an iterator to iterate through the dependencies of
    /// the target node. The iterator starts with a snapshot of
    /// the graph's currently-satisfied set.
    pub fn dependencies_of<'a>(&'a self, target: &str) -> DepGraphIterator<'a>
    {
        // TODO: iterator's satisfied could start empty, and all checks
        // could separately check depgraph's and iterator's. That
        // would avoid the copy.
        DepGraphIterator {
            depgraph: self,
            target: target.to_owned(),
            // cloned so that iterating never mutates the graph itself
            satisfied: self.satisfied.clone(),
            curpath: HashSet::new(),
            halted: false,
        }
    }
}
impl<'a> DepGraphIterator<'a> {
    /// Walks depth-first from `node` toward a node whose dependencies
    /// are all satisfied, and returns that node (possibly `node` itself).
    /// Returns `SolventError::CycleDetected` if `node` is already on the
    /// current walk path.
    fn get_next_dependency(&mut self, node: &String) -> Result<String,SolventError>
    {
        if self.curpath.contains(node) {
            return Err(SolventError::CycleDetected);
        }
        self.curpath.insert(node.clone());
        let deplist = match self.depgraph.dependencies.get(node) {
            // node has no registered dependencies: it is the next output
            None => return Ok(node.clone()),
            Some(deplist) => deplist.clone() // ouch — clones the whole set to satisfy the borrow checker
        };
        for n in deplist.iter() {
            // Prune satisfied nodes
            if self.satisfied.contains(n) {
                continue;
            }
            // recurse into the first unsatisfied dependency found
            return self.get_next_dependency(n);
        }
        // all of this node's dependencies are satisfied
        Ok(node.clone())
    }
}
impl<'a> Iterator for DepGraphIterator<'a> {
    type Item = Result<String,SolventError>;
    /// Get next dependency. Returns None when finished. If
    /// Some(Err(SolventError)) occurs, all subsequent calls will
    /// return None.
    fn next(&mut self) -> Option< Result<String,SolventError> >
    {
        if self.halted {
            return None;
        }
        let node = self.target.clone();
        // once the target itself is satisfied, the walk is complete
        if self.satisfied.contains(&node) {
            return None;
        }
        // each call starts a fresh walk, so reset the cycle-detection path
        self.curpath.clear();
        let next = match self.get_next_dependency(&node) {
            Ok(d) => d,
            Err(e) => {
                // report the error once, then yield None forever
                self.halted = true;
                return Some(Err(e));
            }
        };
        // the emitted node counts as satisfied for subsequent walks
        self.satisfied.insert(next.clone());
        Some(Ok(next))
    }
}
#[test]
fn solvent_test_branching() {
    // Builds a multi-level branching graph and checks that every node
    // is emitted after all of its own dependencies, exactly once.
    let mut depgraph: DepGraph = DepGraph::new();
    depgraph.register_dependencies("a",&["b","c","d"]);
    depgraph.register_dependency("b","d");
    depgraph.register_dependencies("c",&["e","m","g"]);
    depgraph.register_dependency("e","f");
    depgraph.register_dependency("g","h");
    depgraph.register_dependency("h","i");
    depgraph.register_dependencies("i",&["j","k"]);
    depgraph.register_dependencies("k",&["l","m"]);
    depgraph.register_dependency("m","n");
    let mut results: Vec<String> = Vec::new();
    for node in depgraph.dependencies_of("a") {
        // detect infinite looping bugs
        assert!(results.len() < 30);
        let n = node.unwrap();
        // Check that all of that node's dependencies have already been output
        let deps: Option<&HashSet<String>> = depgraph.dependencies.get(&n);
        if deps.is_some() {
            for dep in deps.unwrap().iter() {
                assert!( results.contains(dep) );
            }
        }
        results.push(n.clone());
    }
    // Be sure we actually output enough stuff (all 14 nodes "a".."n")
    assert!(results.len() == 14);
    // Be sure each output is unique
    for result in results.iter() {
        // NOTE(review): `0u` is pre-1.0 integer-literal syntax (now `0usize`)
        let mut count = 0u;
        for result2 in results.iter() {
            if result == result2 { count = count + 1; }
        }
        assert!(count == 1);
    }
}
#[test]
fn solvent_test_updating_dependencies() {
    // Registering dependencies for an existing node must extend its
    // set rather than replace it.
    let mut depgraph: DepGraph = DepGraph::new();
    depgraph.register_dependencies("a",&["b","c"]);
    depgraph.register_dependency("a","d");
    assert!(depgraph.dependencies.get("a").unwrap().contains("b"));
    assert!(depgraph.dependencies.get("a").unwrap().contains("c"));
    assert!(depgraph.dependencies.get("a").unwrap().contains("d"));
}
#[test]
fn solvent_test_circular() {
    // A cycle a -> b -> c -> a must make every iteration step yield
    // CycleDetected rather than loop forever.
    let mut depgraph: DepGraph = DepGraph::new();
    depgraph.register_dependency("a","b");
    depgraph.register_dependency("b","c");
    depgraph.register_dependency("c","a");
    for node in depgraph.dependencies_of("a") {
        assert!(node.is_err());
        assert!(node.unwrap_err() == SolventError::CycleDetected);
    }
}
#[test]
fn solvent_test_satisfied_stoppage() {
    // Nodes marked as satisfied (and anything reachable only through
    // them) must be pruned from the output.
    let mut depgraph: DepGraph = DepGraph::new();
    depgraph.register_dependencies("superconn", &[]);
    depgraph.register_dependencies("owneruser", &["superconn"]);
    depgraph.register_dependencies("appuser", &["superconn"]);
    depgraph.register_dependencies("database", &["owneruser"]);
    depgraph.register_dependencies("ownerconn", &["database","owneruser"]);
    depgraph.register_dependencies("adminconn", &["database"]);
    depgraph.register_dependencies("extensions", &["database","adminconn"]);
    depgraph.register_dependencies("schema_table", &["database","ownerconn"]);
    depgraph.register_dependencies("schemas", &["ownerconn","extensions","schema_table","appuser"]);
    depgraph.register_dependencies("appconn", &["database","appuser","schemas"]);
    depgraph.mark_as_satisfied(&["owneruser","appuser"]);
    let mut results: Vec<String> = Vec::new();
    for node in depgraph.dependencies_of("appconn") {
        // detect infinite looping bugs
        assert!(results.len() < 30);
        results.push(node.unwrap());
    }
    // Be sure we did not depend on these
    assert!( !results.contains(&String::from_str("appuser")) );
    assert!( !results.contains(&String::from_str("owneruser")) );
    assert!( !results.contains(&String::from_str("superconn")) );
    // Be sure we actually output enough stuff (the 7 unpruned nodes)
    assert!(results.len() == 7);
    // Be sure each output is unique
    for result in results.iter() {
        // NOTE(review): `0u` is pre-1.0 integer-literal syntax (now `0usize`)
        let mut count = 0u;
        for result2 in results.iter() {
            if result == result2 { count = count + 1; }
        }
        assert!(count == 1);
    }
}
|
// Copyright 2015 The rust-pcre authors.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate enum_set;
extern crate libc;
extern crate libpcre_sys;
use enum_set::{CLike, EnumSet};
use libc::{c_char, c_int, c_uchar, c_void};
use std::collections::{BTreeMap};
use std::ffi::{CStr, CString};
use std::marker::{PhantomData};
use std::option::{Option};
use std::ptr;
use std::result::{Result};
use std::string::{String};
use std::vec::{Vec};
mod detail;
/// Options that can be passed when compiling a pattern.
// NOTE(review): the discriminant values appear to mirror libpcre's
// `PCRE_*` compile-time flag bits (e.g. `NewlineCRLF` is the combined
// CR|LF value) — confirm against the `pcre.h` of the bound version.
#[derive(Clone)]
pub enum CompileOption {
    Caseless = 0x00000001,
    Multiline = 0x00000002,
    DotAll = 0x00000004,
    Extended = 0x00000008,
    Anchored = 0x00000010,
    DollarEndOnly = 0x00000020,
    Extra = 0x00000040,
    Ungreedy = 0x00000200,
    NoAutoCapture = 0x00001000,
    AutoCallout = 0x00004000,
    FirstLine = 0x00040000,
    DupNames = 0x00080000,
    NewlineCR = 0x00100000,
    NewlineLF = 0x00200000,
    NewlineCRLF = 0x00300000,
    NewlineAny = 0x00400000,
    NewlineAnyCRLF = 0x00500000,
    BsrAnyCRLF = 0x00800000,
    BsrUnicode = 0x01000000,
    JavaScriptCompat = 0x02000000,
    Ucp = 0x20000000
}
/// Options that can be passed when executing (matching) a compiled pattern.
// NOTE(review): the discriminant values appear to mirror libpcre's
// `PCRE_*` match-time flag bits — confirm against `pcre.h`.
#[derive(Clone)]
pub enum ExecOption {
    ExecAnchored = 0x00000010,
    ExecNotBol = 0x00000080,
    ExecNotEol = 0x00000100,
    ExecNotEmpty = 0x00000400,
    ExecPartialSoft = 0x00008000,
    ExecNewlineCR = 0x00100000,
    ExecNewlineLF = 0x00200000,
    ExecNewlineCRLF = 0x00300000,
    ExecNewlineAny = 0x00400000,
    ExecNewlineAnyCRLF = 0x00500000,
    ExecBsrAnyCRLF = 0x00800000,
    ExecBsrUnicode = 0x01000000,
    ExecNoStartOptimise = 0x04000000,
    ExecPartialHard = 0x08000000,
    ExecNotEmptyAtStart = 0x10000000
}
/// Alias kept for compatibility with PCRE's `PCRE_PARTIAL` name.
#[allow(non_upper_case_globals)]
pub const ExecPartial: ExecOption = ExecOption::ExecPartialSoft;
/// US-spelling ("Optimize") alias for `ExecNoStartOptimise`.
#[allow(non_upper_case_globals)]
pub const ExecNoStartOptimize: ExecOption = ExecOption::ExecNoStartOptimise;
/// Options that can be passed when studying a compiled pattern
/// (e.g. to request JIT compilation).
#[derive(Clone)]
pub enum StudyOption {
    StudyJitCompile = 0x0001,
    StudyJitPartialSoftCompile = 0x0002,
    StudyJitPartialHardCompile = 0x0004,
    /// Always create an extra block. Note: Requires PCRE version 8.32 or later.
    StudyExtraNeeded = 0x0008
}
/// Error information produced when compiling a pattern fails.
#[derive(Debug)]
pub struct CompilationError {
    // textual error message from libpcre, if one was provided
    opt_err: Option<String>,
    // offset into the pattern where compilation failed
    // NOTE(review): presumably a byte offset as reported by
    // `pcre_compile`'s erroffset parameter — confirm
    erroffset: c_int
}
/// Wrapper for libpcre's `pcre` object (representing a compiled regular expression).
#[allow(raw_pointer_derive)]
#[derive(Debug)]
pub struct Pcre {
    // raw pointer to the compiled pattern
    // NOTE(review): presumably owned and freed on Drop — the Drop impl
    // is not visible in this chunk; confirm
    code: *const detail::pcre,
    // optional study/JIT data; may be null when the pattern was not studied
    extra: *mut PcreExtra,
    // cached capture-group count for the compiled pattern
    capture_count_: c_int,
    /// A spot to place a pointer-to-mark name string.
    mark_: *mut c_uchar
}
pub type PcreExtra = libpcre_sys::pcre_extra;
/// Represents a match of a subject string against a regular expression.
pub struct Match<'a> {
    // The subject string that was matched; capture groups are slices of it.
    subject: &'a str,
    // First `2 * (capture_count + 1)` entries of pcre_exec's output vector:
    // (start, end) offset pairs for group 0 and each capture group.
    partial_ovector: Vec<c_int>,
    // Return value of `pcre_exec()`: the number of captured substrings.
    string_count_: c_int
}
/// Iterator type for iterating matches within a subject string.
pub struct MatchIterator<'a, 'p> {
    // Compiled pattern; the iterator takes its own `pcre_refcount` reference.
    code: *const detail::pcre,
    // Study/extra block (may be null).
    extra: *const PcreExtra,
    // Cached capture-group count of the pattern.
    capture_count: c_int,
    // The subject string being scanned.
    subject: &'a str,
    // Offset at which the next `pcre_exec()` call resumes matching.
    offset: c_int,
    // Matching options applied to every `pcre_exec()` call.
    options: EnumSet<ExecOption>,
    // Scratch output vector reused across calls.
    ovector: Vec<c_int>,
    // Ties the iterator's lifetime to the `Pcre` it was created from.
    _marker: PhantomData<&'p mut Pcre>
}
impl CLike for CompileOption {
    /// Converts an `EnumSet` bit index back into the corresponding variant.
    ///
    /// These sequential numbers are `EnumSet` bitset positions and are
    /// unrelated to the PCRE bit-flag discriminants declared on the enum.
    /// `from_u32` and `to_u32` must remain exact inverses of each other.
    unsafe fn from_u32(n: u32) -> CompileOption {
        use CompileOption::*;
        match n {
            1 => Caseless,
            2 => Multiline,
            3 => DotAll,
            4 => Extended,
            5 => Anchored,
            6 => DollarEndOnly,
            7 => Extra,
            8 => Ungreedy,
            9 => NoAutoCapture,
            10 => AutoCallout,
            11 => FirstLine,
            12 => DupNames,
            13 => NewlineCR,
            14 => NewlineLF,
            15 => NewlineCRLF,
            16 => NewlineAny,
            17 => NewlineAnyCRLF,
            18 => BsrAnyCRLF,
            19 => BsrUnicode,
            20 => JavaScriptCompat,
            21 => Ucp,
            _ => panic!("unknown CompileOption number {}", n)
        }
    }
    /// Converts a variant into its `EnumSet` bit index (inverse of `from_u32`).
    fn to_u32(&self) -> u32 {
        use CompileOption::*;
        match *self {
            Caseless => 1,
            Multiline => 2,
            DotAll => 3,
            Extended => 4,
            Anchored => 5,
            DollarEndOnly => 6,
            Extra => 7,
            Ungreedy => 8,
            NoAutoCapture => 9,
            AutoCallout => 10,
            FirstLine => 11,
            DupNames => 12,
            NewlineCR => 13,
            NewlineLF => 14,
            NewlineCRLF => 15,
            NewlineAny => 16,
            NewlineAnyCRLF => 17,
            BsrAnyCRLF => 18,
            BsrUnicode => 19,
            JavaScriptCompat => 20,
            Ucp => 21
        }
    }
}
impl CLike for ExecOption {
    /// Converts an `EnumSet` bit index back into the corresponding variant.
    ///
    /// The numbers are `EnumSet` bitset positions, not the PCRE bit-flag
    /// discriminants. `from_u32` and `to_u32` must remain exact inverses.
    unsafe fn from_u32(n: u32) -> ExecOption {
        use ExecOption::*;
        match n {
            1 => ExecAnchored,
            2 => ExecNotBol,
            3 => ExecNotEol,
            4 => ExecNotEmpty,
            5 => ExecPartialSoft,
            6 => ExecNewlineCR,
            7 => ExecNewlineLF,
            8 => ExecNewlineCRLF,
            9 => ExecNewlineAny,
            10 => ExecNewlineAnyCRLF,
            11 => ExecBsrAnyCRLF,
            12 => ExecBsrUnicode,
            13 => ExecNoStartOptimise,
            14 => ExecPartialHard,
            15 => ExecNotEmptyAtStart,
            _ => panic!("unknown ExecOption number {}", n)
        }
    }
    /// Converts a variant into its `EnumSet` bit index (inverse of `from_u32`).
    fn to_u32(&self) -> u32 {
        use ExecOption::*;
        match *self {
            ExecAnchored => 1,
            ExecNotBol => 2,
            ExecNotEol => 3,
            ExecNotEmpty => 4,
            ExecPartialSoft => 5,
            ExecNewlineCR => 6,
            ExecNewlineLF => 7,
            ExecNewlineCRLF => 8,
            ExecNewlineAny => 9,
            ExecNewlineAnyCRLF => 10,
            ExecBsrAnyCRLF => 11,
            ExecBsrUnicode => 12,
            ExecNoStartOptimise => 13,
            ExecPartialHard => 14,
            ExecNotEmptyAtStart => 15
        }
    }
}
impl CLike for StudyOption {
    /// Converts an `EnumSet` bit index back into the corresponding variant.
    ///
    /// The numbers are `EnumSet` bitset positions, not the PCRE_STUDY_* flag
    /// values. `from_u32` and `to_u32` must remain exact inverses.
    unsafe fn from_u32(n: u32) -> StudyOption {
        use StudyOption::*;
        match n {
            1 => StudyJitCompile,
            2 => StudyJitPartialSoftCompile,
            3 => StudyJitPartialHardCompile,
            4 => StudyExtraNeeded,
            _ => panic!("unknown StudyOption number {}", n)
        }
    }
    /// Converts a variant into its `EnumSet` bit index (inverse of `from_u32`).
    fn to_u32(&self) -> u32 {
        use StudyOption::*;
        match *self {
            StudyJitCompile => 1,
            StudyJitPartialSoftCompile => 2,
            StudyJitPartialHardCompile => 3,
            StudyExtraNeeded => 4
        }
    }
}
impl CompilationError {
    /// Returns a copy of the error message produced by `pcre_compile()`, if any.
    pub fn message(&self) -> Option<String> {
        self.opt_err.as_ref().cloned()
    }
    /// Returns the byte offset within the pattern at which compilation failed.
    pub fn offset(&self) -> usize {
        self.erroffset as usize
    }
}
impl std::fmt::Display for CompilationError {
    /// Formats the error, appending the libpcre message when one is available.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        if let Some(ref s) = self.opt_err {
            write!(f, "compilation failed at offset {}: {}", self.erroffset as usize, s)
        } else {
            write!(f, "compilation failed at offset {}", self.erroffset as usize)
        }
    }
}
impl Pcre {
    /// Compiles the given regular expression.
    ///
    /// # Argument
    /// * `pattern` - The regular expression.
    pub fn compile(pattern: &str) -> Result<Pcre, CompilationError> {
        let no_options: EnumSet<CompileOption> = EnumSet::new();
        Pcre::compile_with_options(pattern, &no_options)
    }
    /// Compiles a regular expression using the given bitwise-OR'd options `options`.
    ///
    /// # Arguments
    /// * `pattern` - The regular expression.
    /// * `options` - Bitwise-OR'd compilation options. See the libpcre manpages,
    /// `man 3 pcre_compile`, for more information.
    pub fn compile_with_options(pattern: &str, options: &EnumSet<CompileOption>) -> Result<Pcre, CompilationError> {
        let pattern_cstring = CString::new(pattern).unwrap();
        unsafe {
            // Use the default character tables.
            let tableptr: *const c_uchar = ptr::null();
            match detail::pcre_compile(pattern_cstring.as_ptr(), options, tableptr) {
                Err((opt_err, erroffset)) => Err(CompilationError {
                    opt_err: opt_err,
                    erroffset: erroffset
                }),
                Ok(mut_code) => {
                    let code = mut_code as *const detail::pcre;
                    assert!(!code.is_null());
                    // Take a reference.
                    detail::pcre_refcount(code as *mut detail::pcre, 1);
                    let extra: *mut PcreExtra = ptr::null_mut();
                    // Cache the capture count so later calls don't have to
                    // query pcre_fullinfo() again.
                    let mut capture_count: c_int = 0;
                    detail::pcre_fullinfo(code, extra as *const PcreExtra, libpcre_sys::PCRE_INFO_CAPTURECOUNT,
                                          &mut capture_count as *mut c_int as *mut c_void);
                    Ok(Pcre {
                        code: code,
                        extra: extra,
                        capture_count_: capture_count,
                        mark_: ptr::null_mut()
                    })
                }
            }
        }
    }
    /// Returns the number of capture groups in the regular expression, including one for
    /// each named capture group.
    ///
    /// This count does not include "group 0", which is the full substring within a subject
    /// string that matches the regular expression.
    ///
    /// # See also
    /// * [name_count()](#method.name_count) - Returns the number of named capture groups.
    pub fn capture_count(&self) -> usize {
        self.capture_count_ as usize
    }
    /// Enables the use of the mark field when matching the compiled regular expression. The
    /// pattern must have been previously studied and an extra block must have been created.
    ///
    /// To ensure that an extra block has been created, call [study_with_options()](#method.study_with_options)
    /// passing the [`StudyExtraNeeded`](enum.StudyOption.html#variant.StudyExtraNeeded) study option.
    ///
    /// # Return value
    /// `true` if the use of the mark field could be enabled. `false` otherwise, which signifies
    /// that an extra block needs to be created.
    pub fn enable_mark(&mut self) -> bool {
        unsafe {
            if self.extra.is_null() {
                false
            } else {
                // Point the extra block's mark slot at our `mark_` field so
                // pcre_exec() can fill it in.
                (*self.extra).set_mark(&mut self.mark_);
                true
            }
        }
    }
    /// Returns the extra block, if one has been created.
    pub fn extra(&mut self) -> Option<&mut PcreExtra> {
        unsafe {
            if self.extra.is_null() {
                None
            } else {
                Some(&mut *(self.extra))
            }
        }
    }
    /// Matches the compiled regular expression against a given subject string `subject`.
    /// If no match is found, then `None` is returned. Otherwise, a `Match` object is returned
    /// which provides access to the captured substrings as slices of the subject string.
    ///
    /// # Argument
    /// * `subject` - The subject string.
    ///
    /// # Performance notes
    /// This method is intended to be used to find individual matches. If multiple matches
    /// are desired, then a `MatchIterator` should be used because it is more efficient.
    ///
    /// If a regular expression will be used often, it might be worth studying it to possibly
    /// speed up matching. See the [study()](#method.study) method.
    #[inline]
    pub fn exec<'a, 'p>(&'p mut self, subject: &'a str) -> Option<Match<'a>> {
        self.exec_from(subject, 0)
    }
    /// Matches the compiled regular expression against a given subject string `subject`
    /// starting at offset `startoffset` within the subject string. If no match is found,
    /// then `None` is returned. Otherwise, a `Match` object is returned which provides
    /// access to the captured substrings as slices of the subject string.
    ///
    /// # Arguments
    /// * `subject` - The subject string.
    /// * `startoffset` - Starting offset within `subject` at which to begin looking for
    /// a match.
    ///
    /// # Performance notes
    /// This method is intended to be used to find individual matches. If multiple matches
    /// are desired, then a `MatchIterator` should be used because it is more efficient.
    ///
    /// If a regular expression will be used often, it might be worth studying it to possibly
    /// speed up matching. See the [study()](#method.study) method.
    #[inline]
    pub fn exec_from<'a, 'p>(&'p mut self, subject: &'a str, startoffset: usize) -> Option<Match<'a>> {
        let no_options: EnumSet<ExecOption> = EnumSet::new();
        self.exec_from_with_options(subject, startoffset, &no_options)
    }
    /// Matches the compiled regular expression against a given subject string `subject`
    /// starting at offset `startoffset` within the subject string and using the given
    /// bitwise-OR'd matching options `options`. If no match is found, then `None` is
    /// returned. Otherwise, a `Match` object is returned which provides access to the
    /// captured substrings as slices of the subject string.
    ///
    /// # Arguments
    /// * `subject` - The subject string.
    /// * `startoffset` - Starting offset within `subject` at which to begin looking for
    /// a match.
    /// * `options` - Bitwise-OR'd matching options. See the libpcre manpages, `man 3 pcre_exec`,
    /// for more information.
    ///
    /// # Performance notes
    /// This method is intended to be used to find individual matches. If multiple matches
    /// are desired, then a `MatchIterator` should be used because it is more efficient.
    ///
    /// If a regular expression will be used often, it might be worth studying it to possibly
    /// speed up matching. See the [study()](#method.study) method.
    #[inline]
    pub fn exec_from_with_options<'a, 'p>(&'p mut self, subject: &'a str, startoffset: usize, options: &EnumSet<ExecOption>) -> Option<Match<'a>> {
        // pcre_exec() requires the output vector to be sized in multiples of
        // three: two thirds for (start, end) pairs, one third of workspace.
        let ovecsize = (self.capture_count_ + 1) * 3;
        let mut ovector = vec![0 as c_int; ovecsize as usize];
        unsafe {
            let rc = detail::pcre_exec(self.code,
                                       self.extra as *const PcreExtra,
                                       subject.as_ptr() as *const c_char,
                                       subject.len() as c_int,
                                       startoffset as c_int,
                                       options,
                                       ovector.as_mut_ptr(),
                                       ovecsize as c_int);
            if rc >= 0 {
                Some(Match {
                    subject: subject,
                    // Keep only the (start, end) pairs; the final third of the
                    // vector is pcre_exec() workspace.
                    partial_ovector: ovector[..(((self.capture_count_ + 1) * 2) as usize)].to_vec(),
                    string_count_: rc
                })
            } else {
                None
            }
        }
    }
    /// Returns the mark name from PCRE if set.
    ///
    /// # Return value
    /// `Some(str)` if PCRE returned a value for the mark.
    /// `None` if either there was no mark set or [enable_mark()](#method.enable_mark) was not called,
    /// or was unsuccessful.
    #[inline]
    pub fn mark(&self) -> Option<String> {
        self.mark_bytes().map (|bytes| String::from_utf8(Vec::from(bytes)).unwrap())
    }
    /// Returns the mark name from PCRE if set.
    ///
    /// # Return value
    /// `Some(&[u8])` if PCRE returned a value for the mark.
    /// `None` if either there was no mark set or [enable_mark()](#method.enable_mark) was not called,
    /// or was unsuccessful.
    #[inline]
    pub fn mark_bytes(&self) -> Option<&[u8]> {
        unsafe {
            if self.mark_.is_null() {
                None
            } else {
                let mark_cstr = CStr::from_ptr(self.mark_ as *const c_char);
                Some(mark_cstr.to_bytes())
            }
        }
    }
    /// Creates a `MatchIterator` for iterating through matches within the given subject
    /// string `subject`.
    ///
    /// # Argument
    /// * `subject` - The subject string.
    #[inline]
    pub fn matches<'a, 'p>(&'p mut self, subject: &'a str) -> MatchIterator<'a, 'p> {
        let no_options: EnumSet<ExecOption> = EnumSet::new();
        self.matches_with_options(subject, &no_options)
    }
    /// Creates a `MatchIterator` for iterating through matches within the given subject
    /// string `subject` using the given bitwise-OR'd matching options `options`.
    ///
    /// # Arguments
    /// * `subject` - The subject string.
    /// * `options` - Bitwise-OR'd matching options. See the libpcre manpages, `man 3 pcre_exec`,
    /// for more information.
    #[inline]
    pub fn matches_with_options<'a, 'p>(&'p mut self, subject: &'a str, options: &EnumSet<ExecOption>) -> MatchIterator<'a, 'p> {
        unsafe {
            let ovecsize = (self.capture_count_ + 1) * 3;
            MatchIterator {
                // The iterator takes its own reference so the pattern stays
                // alive even if the `Pcre` is dropped first.
                code: { detail::pcre_refcount(self.code as *mut detail::pcre, 1); self.code },
                extra: self.extra as *const PcreExtra,
                capture_count: self.capture_count_,
                subject: subject,
                offset: 0,
                options: options.clone(),
                ovector: vec![0 as c_int; ovecsize as usize],
                _marker: PhantomData
            }
        }
    }
    /// Returns the number of named capture groups in the regular expression.
    pub fn name_count(&self) -> usize {
        unsafe {
            let mut name_count: c_int = 0;
            detail::pcre_fullinfo(self.code, self.extra as *const PcreExtra, libpcre_sys::PCRE_INFO_NAMECOUNT, &mut name_count as *mut c_int as *mut c_void);
            name_count as usize
        }
    }
    /// Creates a name-to-number translation table that maps the name of each named capture
    /// group to the assigned group numbers.
    ///
    /// The value type of the returned `BTreeMap` is a `usize` vector because there can be
    /// more than one group number for a given name if the PCRE_DUPNAMES option is used
    /// when compiling the regular expression.
    pub fn name_table(&self) -> BTreeMap<String, Vec<usize>> {
        unsafe {
            let name_count = self.name_count();
            let mut tabptr: *const c_uchar = ptr::null();
            detail::pcre_fullinfo(self.code, self.extra as *const PcreExtra, libpcre_sys::PCRE_INFO_NAMETABLE, &mut tabptr as *mut *const c_uchar as *mut c_void);
            let mut name_entry_size: c_int = 0;
            detail::pcre_fullinfo(self.code, self.extra as *const PcreExtra, libpcre_sys::PCRE_INFO_NAMEENTRYSIZE, &mut name_entry_size as *mut c_int as *mut c_void);
            let mut name_table: BTreeMap<String, Vec<usize>> = BTreeMap::new();
            let mut i = 0;
            while i < name_count {
                // Each table entry starts with a big-endian 16-bit group
                // number followed by the NUL-terminated group name.
                let n: usize = ((ptr::read(tabptr) as usize) << 8) | (ptr::read(tabptr.offset(1)) as usize);
                let name_cstr = CStr::from_ptr(tabptr.offset(2) as *const c_char);
                // TODO Check memory allocations
                let name: String = String::from_utf8(Vec::from(name_cstr.to_bytes())).unwrap();
                // Use the entry API so each name is hashed/looked up only once
                // (replaces the former `contains_key` + `insert`/`get_mut`
                // double lookup flagged by the old TODO).
                name_table.entry(name).or_insert_with(Vec::new).push(n);
                tabptr = tabptr.offset(name_entry_size as isize);
                i += 1;
            }
            name_table
        }
    }
    /// Studies the regular expression to see if additional information can be extracted
    /// which might speed up matching.
    ///
    /// # Return value
    /// `true` if additional information could be extracted. `false` otherwise.
    pub fn study(&mut self) -> bool {
        let no_options: EnumSet<StudyOption> = EnumSet::new();
        self.study_with_options(&no_options)
    }
    /// Studies the regular expression using the given bitwise-OR'd study options `options`
    /// to see if additional information can be extracted which might speed up matching.
    ///
    /// # Argument
    /// * `options` - Study options. See the libpcre manpages, `man 3 pcre_study`, for more
    /// information about each option.
    ///
    /// # Return value
    /// `true` if additional information could be extracted or the [`StudyExtraNeeded`](enum.StudyOption.html#variant.StudyExtraNeeded)
    /// option was passed. `false` otherwise.
    pub fn study_with_options(&mut self, options: &EnumSet<StudyOption>) -> bool {
        unsafe {
            // If something else has a reference to `code` then it probably has a pointer to
            // the current study data (if any). Thus, we shouldn't free the current study data
            // in that case.
            if detail::pcre_refcount(self.code as *mut detail::pcre, 0) != 1 {
                false
            } else {
                // Free any current study data.
                detail::pcre_free_study(self.extra as *mut PcreExtra);
                self.extra = ptr::null_mut();
                let extra = detail::pcre_study(self.code, options);
                self.extra = extra;
                !extra.is_null()
            }
        }
    }
}
impl Drop for Pcre {
    /// Releases this object's reference to the compiled pattern. The pattern
    /// and any study data are freed only when the last reference (possibly
    /// shared with outstanding `MatchIterator`s) is dropped.
    fn drop(&mut self) {
        unsafe {
            // `pcre_refcount(code, -1)` decrements and returns the new count.
            if detail::pcre_refcount(self.code as *mut detail::pcre, -1) == 0 {
                detail::pcre_free_study(self.extra as *mut PcreExtra);
                detail::pcre_free(self.code as *mut detail::pcre as *mut c_void);
            }
            // Null the pointers regardless, so they cannot be used afterwards.
            self.extra = ptr::null_mut();
            self.code = ptr::null();
        }
    }
}
impl<'a> Match<'a> {
    /// Returns the start index within the subject string of capture group `n`.
    ///
    /// If the capture group is present in the pattern but wasn't captured then the start of it will be `usize::max_value()`.
    /// Happens with the optional groups, `/(optional)?/`.
    pub fn group_start(&self, n: usize) -> usize {
        self.partial_ovector[n * 2] as usize
    }
    /// Returns the end index within the subject string of capture group `n`.
    ///
    /// If the capture group is present in the pattern but wasn't captured then the end of it will be `usize::max_value()`.
    /// Happens with the optional groups, `/(optional)?/`.
    pub fn group_end(&self, n: usize) -> usize {
        self.partial_ovector[n * 2 + 1] as usize
    }
    /// Returns the length of the substring for capture group `n`.
    pub fn group_len(&self, n: usize) -> usize {
        // The ovector stores (start, end) offset pairs per group.
        let start = self.partial_ovector[n * 2];
        let end = self.partial_ovector[n * 2 + 1];
        (end - start) as usize
    }
    /// Returns the substring for capture group `n` as a slice.
    #[inline]
    pub fn group(&self, n: usize) -> &'a str {
        let start = self.partial_ovector[n * 2] as usize;
        let end = self.partial_ovector[n * 2 + 1] as usize;
        &self.subject[start..end]
    }
    /// Returns the number of substrings captured.
    pub fn string_count(&self) -> usize {
        self.string_count_ as usize
    }
}
impl<'a, 'p> Drop for MatchIterator<'a, 'p> {
    /// Releases the iterator's reference to the compiled pattern, freeing the
    /// pattern and study data if this was the last reference.
    fn drop(&mut self) {
        unsafe {
            // `pcre_refcount(code, -1)` decrements and returns the new count.
            if detail::pcre_refcount(self.code as *mut detail::pcre, -1) == 0 {
                detail::pcre_free_study(self.extra as *mut PcreExtra);
                detail::pcre_free(self.code as *mut detail::pcre as *mut c_void);
            }
            // Null the pointers regardless, so they cannot be used afterwards.
            self.extra = ptr::null();
            self.code = ptr::null();
        }
    }
}
impl<'a, 'p> Iterator for MatchIterator<'a, 'p> {
    type Item = Match<'a>;
    /// Gets the next match.
    #[inline]
    fn next(&mut self) -> Option<Match<'a>> {
        unsafe {
            // Resume matching at the offset where the previous match ended.
            let rc = detail::pcre_exec(self.code,
                                       self.extra,
                                       self.subject.as_ptr() as *const c_char,
                                       self.subject.len() as c_int,
                                       self.offset,
                                       &self.options,
                                       self.ovector.as_mut_ptr(),
                                       self.ovector.len() as c_int);
            if rc >= 0 {
                // Update the iterator state.
                // ovector[1] is the end offset of the whole match (group 0).
                // NOTE(review): a zero-length match leaves `offset` unchanged,
                // so the iterator could yield the same empty match repeatedly —
                // verify against callers / consider PCRE_NOTEMPTY_ATSTART.
                self.offset = self.ovector[1];
                Some(Match {
                    subject: self.subject,
                    partial_ovector: self.ovector[..(((self.capture_count + 1) * 2) as usize)].to_vec(),
                    string_count_: rc
                })
            } else {
                // Any negative return code (no match, or an error) ends iteration.
                None
            }
        }
    }
}
/// Returns libpcre version information.
///
/// The string is whatever the underlying C library reports via
/// `detail::pcre_version()` — it describes libpcre itself, not this wrapper.
pub fn pcre_version() -> String {
    detail::pcre_version()
}
// Remove `#[allow(raw_pointer_derive)]` — the `raw_pointer_derive` lint no longer exists in rustc.
// Copyright 2015 The rust-pcre authors.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate enum_set;
extern crate libc;
extern crate libpcre_sys;
use enum_set::{CLike, EnumSet};
use libc::{c_char, c_int, c_uchar, c_void};
use std::collections::{BTreeMap};
use std::ffi::{CStr, CString};
use std::marker::{PhantomData};
use std::option::{Option};
use std::ptr;
use std::result::{Result};
use std::string::{String};
use std::vec::{Vec};
mod detail;
/// Compile-time options passed to `pcre_compile()`.
///
/// The discriminant of each variant is the corresponding `PCRE_*` bit flag.
/// Note that the newline variants are multi-bit values rather than
/// independent flags: `NewlineCRLF` (0x00300000) is the bitwise OR of
/// `NewlineCR` and `NewlineLF`.
#[derive(Clone)]
pub enum CompileOption {
    Caseless = 0x00000001,
    Multiline = 0x00000002,
    DotAll = 0x00000004,
    Extended = 0x00000008,
    Anchored = 0x00000010,
    DollarEndOnly = 0x00000020,
    Extra = 0x00000040,
    Ungreedy = 0x00000200,
    NoAutoCapture = 0x00001000,
    AutoCallout = 0x00004000,
    FirstLine = 0x00040000,
    DupNames = 0x00080000,
    NewlineCR = 0x00100000,
    NewlineLF = 0x00200000,
    NewlineCRLF = 0x00300000,
    NewlineAny = 0x00400000,
    NewlineAnyCRLF = 0x00500000,
    BsrAnyCRLF = 0x00800000,
    BsrUnicode = 0x01000000,
    JavaScriptCompat = 0x02000000,
    Ucp = 0x20000000
}
/// Matching-time options passed to `pcre_exec()`.
///
/// The discriminant of each variant is the corresponding `PCRE_*` bit flag.
/// As with `CompileOption`, the newline variants are multi-bit values:
/// `ExecNewlineCRLF` (0x00300000) is the OR of `ExecNewlineCR` and
/// `ExecNewlineLF`.
#[derive(Clone)]
pub enum ExecOption {
    ExecAnchored = 0x00000010,
    ExecNotBol = 0x00000080,
    ExecNotEol = 0x00000100,
    ExecNotEmpty = 0x00000400,
    ExecPartialSoft = 0x00008000,
    ExecNewlineCR = 0x00100000,
    ExecNewlineLF = 0x00200000,
    ExecNewlineCRLF = 0x00300000,
    ExecNewlineAny = 0x00400000,
    ExecNewlineAnyCRLF = 0x00500000,
    ExecBsrAnyCRLF = 0x00800000,
    ExecBsrUnicode = 0x01000000,
    ExecNoStartOptimise = 0x04000000,
    ExecPartialHard = 0x08000000,
    ExecNotEmptyAtStart = 0x10000000
}
/// Backwards-compatible alias for [`ExecOption::ExecPartialSoft`].
#[allow(non_upper_case_globals)]
pub const ExecPartial: ExecOption = ExecOption::ExecPartialSoft;
/// Alias for [`ExecOption::ExecNoStartOptimise`] using the American spelling.
#[allow(non_upper_case_globals)]
pub const ExecNoStartOptimize: ExecOption = ExecOption::ExecNoStartOptimise;
/// Options for studying a compiled pattern (see `Pcre::study_with_options()`).
///
/// The discriminant of each variant is the corresponding `PCRE_STUDY_*` bit flag.
#[derive(Clone)]
pub enum StudyOption {
    StudyJitCompile = 0x0001,
    StudyJitPartialSoftCompile = 0x0002,
    StudyJitPartialHardCompile = 0x0004,
    /// Always create an extra block. Note: Requires PCRE version 8.32 or later.
    StudyExtraNeeded = 0x0008
}
/// Error returned when compiling a regular expression fails.
#[derive(Debug)]
pub struct CompilationError {
    // Error message reported by `pcre_compile()`, if one was provided.
    opt_err: Option<String>,
    // Byte offset into the pattern at which compilation failed.
    erroffset: c_int
}
/// Wrapper for libpcre's `pcre` object (representing a compiled regular expression).
#[derive(Debug)]
pub struct Pcre {
    // Pointer to the compiled pattern; reference-counted via `pcre_refcount()`.
    code: *const detail::pcre,
    // Extra block produced by `pcre_study()`; null until the pattern is studied.
    extra: *mut PcreExtra,
    // Cached capture-group count (queried with PCRE_INFO_CAPTURECOUNT).
    capture_count_: c_int,
    /// A spot to place a pointer-to-mark name string.
    mark_: *mut c_uchar
}
/// Alias for the `pcre_extra` struct re-exported from the `libpcre-sys` crate.
pub type PcreExtra = libpcre_sys::pcre_extra;
/// Represents a match of a subject string against a regular expression.
pub struct Match<'a> {
    // The subject string that was matched; capture groups are slices of it.
    subject: &'a str,
    // First `2 * (capture_count + 1)` entries of pcre_exec's output vector:
    // (start, end) offset pairs for group 0 and each capture group.
    partial_ovector: Vec<c_int>,
    // Return value of `pcre_exec()`: the number of captured substrings.
    string_count_: c_int
}
/// Iterator type for iterating matches within a subject string.
pub struct MatchIterator<'a, 'p> {
    // Compiled pattern; the iterator takes its own `pcre_refcount` reference.
    code: *const detail::pcre,
    // Study/extra block (may be null).
    extra: *const PcreExtra,
    // Cached capture-group count of the pattern.
    capture_count: c_int,
    // The subject string being scanned.
    subject: &'a str,
    // Offset at which the next `pcre_exec()` call resumes matching.
    offset: c_int,
    // Matching options applied to every `pcre_exec()` call.
    options: EnumSet<ExecOption>,
    // Scratch output vector reused across calls.
    ovector: Vec<c_int>,
    // Ties the iterator's lifetime to the `Pcre` it was created from.
    _marker: PhantomData<&'p mut Pcre>
}
impl CLike for CompileOption {
    /// Converts an `EnumSet` bit index back into the corresponding variant.
    ///
    /// These sequential numbers are `EnumSet` bitset positions and are
    /// unrelated to the PCRE bit-flag discriminants declared on the enum.
    /// `from_u32` and `to_u32` must remain exact inverses of each other.
    unsafe fn from_u32(n: u32) -> CompileOption {
        use CompileOption::*;
        match n {
            1 => Caseless,
            2 => Multiline,
            3 => DotAll,
            4 => Extended,
            5 => Anchored,
            6 => DollarEndOnly,
            7 => Extra,
            8 => Ungreedy,
            9 => NoAutoCapture,
            10 => AutoCallout,
            11 => FirstLine,
            12 => DupNames,
            13 => NewlineCR,
            14 => NewlineLF,
            15 => NewlineCRLF,
            16 => NewlineAny,
            17 => NewlineAnyCRLF,
            18 => BsrAnyCRLF,
            19 => BsrUnicode,
            20 => JavaScriptCompat,
            21 => Ucp,
            _ => panic!("unknown CompileOption number {}", n)
        }
    }
    /// Converts a variant into its `EnumSet` bit index (inverse of `from_u32`).
    fn to_u32(&self) -> u32 {
        use CompileOption::*;
        match *self {
            Caseless => 1,
            Multiline => 2,
            DotAll => 3,
            Extended => 4,
            Anchored => 5,
            DollarEndOnly => 6,
            Extra => 7,
            Ungreedy => 8,
            NoAutoCapture => 9,
            AutoCallout => 10,
            FirstLine => 11,
            DupNames => 12,
            NewlineCR => 13,
            NewlineLF => 14,
            NewlineCRLF => 15,
            NewlineAny => 16,
            NewlineAnyCRLF => 17,
            BsrAnyCRLF => 18,
            BsrUnicode => 19,
            JavaScriptCompat => 20,
            Ucp => 21
        }
    }
}
impl CLike for ExecOption {
    /// Converts an `EnumSet` bit index back into the corresponding variant.
    ///
    /// The numbers are `EnumSet` bitset positions, not the PCRE bit-flag
    /// discriminants. `from_u32` and `to_u32` must remain exact inverses.
    unsafe fn from_u32(n: u32) -> ExecOption {
        use ExecOption::*;
        match n {
            1 => ExecAnchored,
            2 => ExecNotBol,
            3 => ExecNotEol,
            4 => ExecNotEmpty,
            5 => ExecPartialSoft,
            6 => ExecNewlineCR,
            7 => ExecNewlineLF,
            8 => ExecNewlineCRLF,
            9 => ExecNewlineAny,
            10 => ExecNewlineAnyCRLF,
            11 => ExecBsrAnyCRLF,
            12 => ExecBsrUnicode,
            13 => ExecNoStartOptimise,
            14 => ExecPartialHard,
            15 => ExecNotEmptyAtStart,
            _ => panic!("unknown ExecOption number {}", n)
        }
    }
    /// Converts a variant into its `EnumSet` bit index (inverse of `from_u32`).
    fn to_u32(&self) -> u32 {
        use ExecOption::*;
        match *self {
            ExecAnchored => 1,
            ExecNotBol => 2,
            ExecNotEol => 3,
            ExecNotEmpty => 4,
            ExecPartialSoft => 5,
            ExecNewlineCR => 6,
            ExecNewlineLF => 7,
            ExecNewlineCRLF => 8,
            ExecNewlineAny => 9,
            ExecNewlineAnyCRLF => 10,
            ExecBsrAnyCRLF => 11,
            ExecBsrUnicode => 12,
            ExecNoStartOptimise => 13,
            ExecPartialHard => 14,
            ExecNotEmptyAtStart => 15
        }
    }
}
impl CLike for StudyOption {
    /// Converts an `EnumSet` bit index back into the corresponding variant.
    ///
    /// The numbers are `EnumSet` bitset positions, not the PCRE_STUDY_* flag
    /// values. `from_u32` and `to_u32` must remain exact inverses.
    unsafe fn from_u32(n: u32) -> StudyOption {
        use StudyOption::*;
        match n {
            1 => StudyJitCompile,
            2 => StudyJitPartialSoftCompile,
            3 => StudyJitPartialHardCompile,
            4 => StudyExtraNeeded,
            _ => panic!("unknown StudyOption number {}", n)
        }
    }
    /// Converts a variant into its `EnumSet` bit index (inverse of `from_u32`).
    fn to_u32(&self) -> u32 {
        use StudyOption::*;
        match *self {
            StudyJitCompile => 1,
            StudyJitPartialSoftCompile => 2,
            StudyJitPartialHardCompile => 3,
            StudyExtraNeeded => 4
        }
    }
}
impl CompilationError {
    /// Returns a copy of the error message produced by `pcre_compile()`, if any.
    pub fn message(&self) -> Option<String> {
        self.opt_err.as_ref().cloned()
    }
    /// Returns the byte offset within the pattern at which compilation failed.
    pub fn offset(&self) -> usize {
        self.erroffset as usize
    }
}
impl std::fmt::Display for CompilationError {
    /// Formats the error, appending the libpcre message when one is available.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        if let Some(ref s) = self.opt_err {
            write!(f, "compilation failed at offset {}: {}", self.erroffset as usize, s)
        } else {
            write!(f, "compilation failed at offset {}", self.erroffset as usize)
        }
    }
}
impl Pcre {
/// Compiles the given regular expression.
///
/// # Argument
/// * `pattern` - The regular expression.
pub fn compile(pattern: &str) -> Result<Pcre, CompilationError> {
let no_options: EnumSet<CompileOption> = EnumSet::new();
Pcre::compile_with_options(pattern, &no_options)
}
/// Compiles a regular expression using the given bitwise-OR'd options `options`.
///
/// # Arguments
/// * `pattern` - The regular expression.
/// * `options` - Bitwise-OR'd compilation options. See the libpcre manpages,
/// `man 3 pcre_compile`, for more information.
pub fn compile_with_options(pattern: &str, options: &EnumSet<CompileOption>) -> Result<Pcre, CompilationError> {
let pattern_cstring = CString::new(pattern).unwrap();
unsafe {
// Use the default character tables.
let tableptr: *const c_uchar = ptr::null();
match detail::pcre_compile(pattern_cstring.as_ptr(), options, tableptr) {
Err((opt_err, erroffset)) => Err(CompilationError {
opt_err: opt_err,
erroffset: erroffset
}),
Ok(mut_code) => {
let code = mut_code as *const detail::pcre;
assert!(!code.is_null());
// Take a reference.
detail::pcre_refcount(code as *mut detail::pcre, 1);
let extra: *mut PcreExtra = ptr::null_mut();
let mut capture_count: c_int = 0;
detail::pcre_fullinfo(code, extra as *const PcreExtra, libpcre_sys::PCRE_INFO_CAPTURECOUNT,
&mut capture_count as *mut c_int as *mut c_void);
Ok(Pcre {
code: code,
extra: extra,
capture_count_: capture_count,
mark_: ptr::null_mut()
})
}
}
}
}
/// Returns the number of capture groups in the regular expression, including one for
/// each named capture group.
///
/// This count does not include "group 0", which is the full substring within a subject
/// string that matches the regular expression.
///
/// # See also
/// * [name_count()](#method.name_count) - Returns the number of named capture groups.
pub fn capture_count(&self) -> usize {
self.capture_count_ as usize
}
/// Enables the use of the mark field when matching the compiled regular expression. The
/// pattern must have been previously studied and an extra block must have been created.
///
/// To ensure that an extra block has been created, call [study_with_options()](#method.study_with_options)
/// passing the [`StudyExtraNeeded`](enum.StudyOption.html#variant.StudyExtraNeeded) study option.
///
/// # Return value
/// `true` if the use of the mark field could be enabled. `false` otherwise, which signifies
/// that an extra block needs to be created.
pub fn enable_mark(&mut self) -> bool {
unsafe {
if self.extra.is_null() {
false
} else {
(*self.extra).set_mark(&mut self.mark_);
true
}
}
}
/// Returns the extra block, if one has been created.
pub fn extra(&mut self) -> Option<&mut PcreExtra> {
unsafe {
if self.extra.is_null() {
None
} else {
Some(&mut *(self.extra))
}
}
}
/// Matches the compiled regular expression against a given subject string `subject`.
/// If no match is found, then `None` is returned. Otherwise, a `Match` object is returned
/// which provides access to the captured substrings as slices of the subject string.
///
/// # Argument
/// * `subject` - The subject string.
///
/// # Performance notes
/// This method is intended to be used to find individual matches. If multiple matches
/// are desired, then a `MatchIterator` should be used because it is more efficient.
///
/// If a regular expression will be used often, it might be worth studying it to possibly
/// speed up matching. See the [study()](#method.study) method.
#[inline]
pub fn exec<'a, 'p>(&'p mut self, subject: &'a str) -> Option<Match<'a>> {
self.exec_from(subject, 0)
}
/// Matches the compiled regular expression against a given subject string `subject`
/// starting at offset `startoffset` within the subject string. If no match is found,
/// then `None` is returned. Otherwise, a `Match` object is returned which provides
/// access to the captured substrings as slices of the subject string.
///
/// # Arguments
/// * `subject` - The subject string.
/// * `startoffset` - Starting offset within `subject` at which to begin looking for
/// a match.
///
/// # Performance notes
/// This method is intended to be used to find individual matches. If multiple matches
/// are desired, then a `MatchIterator` should be used because it is more efficient.
///
/// If a regular expression will be used often, it might be worth studying it to possibly
/// speed up matching. See the [study()](#method.study) method.
#[inline]
pub fn exec_from<'a, 'p>(&'p mut self, subject: &'a str, startoffset: usize) -> Option<Match<'a>> {
let no_options: EnumSet<ExecOption> = EnumSet::new();
self.exec_from_with_options(subject, startoffset, &no_options)
}
/// Matches the compiled regular expression against a given subject string `subject`
/// starting at offset `startoffset` within the subject string and using the given
/// bitwise-OR'd matching options `options`. If no match is found, then `None` is
/// returned. Otherwise, a `Match` object is returned which provides access to the
/// captured substrings as slices of the subject string.
///
/// # Arguments
/// * `subject` - The subject string.
/// * `startoffset` - Starting offset within `subject` at which to begin looking for
/// a match.
/// * `options` - Bitwise-OR'd matching options. See the libpcre manpages, `man 3 pcre_exec`,
/// for more information.
///
/// # Performance notes
/// This method is intended to be used to find individual matches. If multiple matches
/// are desired, then a `MatchIterator` should be used because it is more efficient.
///
/// If a regular expression will be used often, it might be worth studying it to possibly
/// speed up matching. See the [study()](#method.study) method.
#[inline]
pub fn exec_from_with_options<'a, 'p>(&'p mut self, subject: &'a str, startoffset: usize, options: &EnumSet<ExecOption>) -> Option<Match<'a>> {
let ovecsize = (self.capture_count_ + 1) * 3;
let mut ovector = vec![0 as c_int; ovecsize as usize];
unsafe {
let rc = detail::pcre_exec(self.code,
self.extra as *const PcreExtra,
subject.as_ptr() as *const c_char,
subject.len() as c_int,
startoffset as c_int,
options,
ovector.as_mut_ptr(),
ovecsize as c_int);
if rc >= 0 {
Some(Match {
subject: subject,
partial_ovector: ovector[..(((self.capture_count_ + 1) * 2) as usize)].to_vec(),
string_count_: rc
})
} else {
None
}
}
}
/// Returns the mark name from PCRE if set.
///
/// # Return value
/// `Some(str)` if PCRE returned a value for the mark.
/// `None` if either there was no mark set or [enable_mark()](#method.enable_mark) was not called,
/// or was unsuccessful.
#[inline]
pub fn mark(&self) -> Option<String> {
self.mark_bytes().map (|bytes| String::from_utf8(Vec::from(bytes)).unwrap())
}
/// Returns the mark name from PCRE if set.
///
/// # Return value
/// `Some(&[u8])` if PCRE returned a value for the mark.
/// `None` if either there was no mark set or [enable_mark()](#method.enable_mark) was not called,
/// or was unsuccessful.
#[inline]
pub fn mark_bytes(&self) -> Option<&[u8]> {
unsafe {
if self.mark_.is_null() {
None
} else {
let mark_cstr = CStr::from_ptr(self.mark_ as *const c_char);
Some(mark_cstr.to_bytes())
}
}
}
/// Creates a `MatchIterator` for iterating through matches within the given subject
/// string `subject`.
///
/// # Argument
/// * `subject` - The subject string.
#[inline]
pub fn matches<'a, 'p>(&'p mut self, subject: &'a str) -> MatchIterator<'a, 'p> {
let no_options: EnumSet<ExecOption> = EnumSet::new();
self.matches_with_options(subject, &no_options)
}
/// Creates a `MatchIterator` for iterating through matches within the given subject
/// string `subject` using the given bitwise-OR'd matching options `options`.
///
/// # Arguments
/// * `subject` - The subject string.
/// * `options` - Bitwise-OR'd matching options. See the libpcre manpages, `man 3 pcre_exec`,
/// for more information.
#[inline]
pub fn matches_with_options<'a, 'p>(&'p mut self, subject: &'a str, options: &EnumSet<ExecOption>) -> MatchIterator<'a, 'p> {
unsafe {
let ovecsize = (self.capture_count_ + 1) * 3;
MatchIterator {
code: { detail::pcre_refcount(self.code as *mut detail::pcre, 1); self.code },
extra: self.extra as *const PcreExtra,
capture_count: self.capture_count_,
subject: subject,
offset: 0,
options: options.clone(),
ovector: vec![0 as c_int; ovecsize as usize],
_marker: PhantomData
}
}
}
/// Returns the number of named capture groups in the regular expression.
pub fn name_count(&self) -> usize {
unsafe {
let mut name_count: c_int = 0;
detail::pcre_fullinfo(self.code, self.extra as *const PcreExtra, libpcre_sys::PCRE_INFO_NAMECOUNT, &mut name_count as *mut c_int as *mut c_void);
name_count as usize
}
}
/// Creates a name-to-number translation table that maps the name of each named capture
/// group to the assigned group numbers.
///
/// The value type of the returned `BTreeMap` is a `usize` vector because there can be
/// more than one group number for a given name if the PCRE_DUPNAMES option is used
/// when compiling the regular expression.
pub fn name_table(&self) -> BTreeMap<String, Vec<usize>> {
    unsafe {
        let name_count = self.name_count();

        // The name table is a contiguous block of fixed-size entries; fetch
        // its base pointer and per-entry stride from libpcre.
        let mut tabptr: *const c_uchar = ptr::null();
        detail::pcre_fullinfo(self.code, self.extra as *const PcreExtra, libpcre_sys::PCRE_INFO_NAMETABLE, &mut tabptr as *mut *const c_uchar as *mut c_void);
        let mut name_entry_size: c_int = 0;
        detail::pcre_fullinfo(self.code, self.extra as *const PcreExtra, libpcre_sys::PCRE_INFO_NAMEENTRYSIZE, &mut name_entry_size as *mut c_int as *mut c_void);

        let mut name_table: BTreeMap<String, Vec<usize>> = BTreeMap::new();
        for _ in 0..name_count {
            // Each entry starts with the group number as a big-endian 16-bit
            // value, followed by the NUL-terminated group name.
            let n: usize = ((ptr::read(tabptr) as usize) << 8) | (ptr::read(tabptr.offset(1)) as usize);
            let name_cstr = CStr::from_ptr(tabptr.offset(2) as *const c_char);
            let name: String = String::from_utf8(Vec::from(name_cstr.to_bytes())).unwrap();
            // Entry API resolves the old TODO: one lookup instead of the
            // contains_key + insert/get_mut double lookup.
            name_table.entry(name).or_insert_with(Vec::new).push(n);
            tabptr = tabptr.offset(name_entry_size as isize);
        }
        name_table
    }
}
/// Studies the regular expression to see if additional information can be extracted
/// which might speed up matching.
///
/// # Return value
/// `true` if additional information could be extracted. `false` otherwise.
pub fn study(&mut self) -> bool {
let no_options: EnumSet<StudyOption> = EnumSet::new();
self.study_with_options(&no_options)
}
/// Studies the regular expression using the given bitwise-OR'd study options `options`
/// to see if additional information can be extracted which might speed up matching.
///
/// # Argument
/// * `options` - Study options. See the libpcre manpages, `man 3 pcre_study`, for more
/// information about each option.
///
/// # Return value
/// `true` if additional information could be extracted or the [`StudyExtraNeeded`](enum.StudyOption.html#variant.StudyExtraNeeded)
/// option was passed. `false` otherwise.
pub fn study_with_options(&mut self, options: &EnumSet<StudyOption>) -> bool {
unsafe {
// If something else has a reference to `code` then it probably has a pointer to
// the current study data (if any). Thus, we shouldn't free the current study data
// in that case.
if detail::pcre_refcount(self.code as *mut detail::pcre, 0) != 1 {
false
} else {
// Free any current study data.
detail::pcre_free_study(self.extra as *mut PcreExtra);
// Clear the pointer before re-studying so a panic inside
// pcre_study cannot leave a dangling `extra`.
self.extra = ptr::null_mut();
let extra = detail::pcre_study(self.code, options);
self.extra = extra;
// A null result means no extra data was produced.
!extra.is_null()
}
}
}
}
impl Drop for Pcre {
fn drop(&mut self) {
unsafe {
// Decrement the shared refcount; only the last owner frees the study
// data and the compiled pattern (MatchIterator shares the same count).
if detail::pcre_refcount(self.code as *mut detail::pcre, -1) == 0 {
detail::pcre_free_study(self.extra as *mut PcreExtra);
detail::pcre_free(self.code as *mut detail::pcre as *mut c_void);
}
// Null the pointers regardless, so any accidental later use is a
// clean null rather than a dangling pointer.
self.extra = ptr::null_mut();
self.code = ptr::null();
}
}
}
impl<'a> Match<'a> {
    /// Returns the start index within the subject string of capture group `n`.
    ///
    /// If the capture group is present in the pattern but wasn't captured then the start of it will be `usize::max_value()`.
    /// Happens with the optional groups, `/(optional)?/`.
    pub fn group_start(&self, n: usize) -> usize {
        // ovector layout: [start0, end0, start1, end1, ...]
        self.partial_ovector[n * 2] as usize
    }

    /// Returns the end index within the subject string of capture group `n`.
    ///
    /// If the capture group is present in the pattern but wasn't captured then the end of it will be `usize::max_value()`.
    /// Happens with the optional groups, `/(optional)?/`.
    pub fn group_end(&self, n: usize) -> usize {
        self.partial_ovector[n * 2 + 1] as usize
    }

    /// Returns the length of the substring for capture group `n`.
    pub fn group_len(&self, n: usize) -> usize {
        self.group_end(n) - self.group_start(n)
    }

    /// Returns the substring for capture group `n` as a slice.
    #[inline]
    pub fn group(&self, n: usize) -> &'a str {
        &self.subject[self.group_start(n)..self.group_end(n)]
    }

    /// Returns the number of substrings captured.
    pub fn string_count(&self) -> usize {
        self.string_count_ as usize
    }
}
impl<'a, 'p> Drop for MatchIterator<'a, 'p> {
fn drop(&mut self) {
unsafe {
// Mirror of `Drop for Pcre`: release our share of the refcount and
// free the pattern data only when the count hits zero.
if detail::pcre_refcount(self.code as *mut detail::pcre, -1) == 0 {
detail::pcre_free_study(self.extra as *mut PcreExtra);
detail::pcre_free(self.code as *mut detail::pcre as *mut c_void);
}
self.extra = ptr::null();
self.code = ptr::null();
}
}
}
impl<'a, 'p> Iterator for MatchIterator<'a, 'p> {
type Item = Match<'a>;
/// Gets the next match.
#[inline]
fn next(&mut self) -> Option<Match<'a>> {
unsafe {
// Run pcre_exec starting at the byte offset where the previous match
// ended; a negative return code means no (further) match.
let rc = detail::pcre_exec(self.code,
self.extra,
self.subject.as_ptr() as *const c_char,
self.subject.len() as c_int,
self.offset,
&self.options,
self.ovector.as_mut_ptr(),
self.ovector.len() as c_int);
if rc >= 0 {
// Update the iterator state.
// ovector[1] holds the end offset of the whole match; resume there.
self.offset = self.ovector[1];
Some(Match {
subject: self.subject,
// Copy only the capture-group slots; the tail of the ovector is
// pcre's workspace.
partial_ovector: self.ovector[..(((self.capture_count + 1) * 2) as usize)].to_vec(),
string_count_: rc
})
} else {
None
}
}
}
}
/// Returns libpcre version information.
pub fn pcre_version() -> String {
// Thin wrapper over the `detail` binding.
detail::pcre_version()
}
|
extern crate euclid;
extern crate time;
use std::mem;
use std::iter;
use std::collections::HashMap;
use std::rc::Rc;
use euclid::{Point2D, Rect, Size2D};
use euclid::{TypedPoint2D, TypedRect, TypedSize2D};
/// Simple 32-bit image container.
///
/// The pixel data structure is RGBA.
#[derive(Clone)]
pub struct ImageBuffer {
/// Image size.
pub size: Size2D<u32>,
/// RGBA pixels, in rows from top left down, len must be width * height.
/// Each pixel is packed into a single `u32`.
pub pixels: Vec<u32>,
}
impl ImageBuffer {
    /// Build an empty buffer with every pixel zeroed.
    pub fn new(width: u32, height: u32) -> ImageBuffer {
        ImageBuffer {
            size: Size2D::new(width, height),
            pixels: vec![0u32; (width * height) as usize],
        }
    }

    /// A minimal 1x1 all-white buffer, used for the default solid texture.
    fn blank() -> ImageBuffer {
        ImageBuffer {
            size: Size2D::new(1, 1),
            pixels: vec![0xffffffff],
        }
    }

    /// Build the buffer from a function.
    pub fn from_fn<F>(width: u32, height: u32, f: F) -> ImageBuffer
        where F: Fn(u32, u32) -> u32
    {
        // Map the flat pixel index back to (x, y) for the generator.
        ImageBuffer {
            size: Size2D::new(width, height),
            pixels: (0..width * height).map(|i| f(i % width, i / width)).collect(),
        }
    }

    /// Build the buffer from RGBA pixel iterator.
    pub fn from_iter<I>(width: u32, height: u32, pixels: &mut I) -> ImageBuffer
        where I: Iterator<Item = u32>
    {
        let pixel_count = (width * height) as usize;
        ImageBuffer {
            size: Size2D::new(width, height),
            pixels: pixels.take(pixel_count).collect(),
        }
    }

    /// Copy all pixels from source buffer to self starting at given coordinates in self.
    pub fn copy_from(&mut self, source: &ImageBuffer, x: u32, y: u32) {
        let own_bounds = Rect::new(Point2D::new(0, 0), self.size);
        let target_rect = Rect::new(Point2D::new(x, y), source.size);
        // Clip the copy region against our own bounds before touching pixels.
        if let Some(clipped) = target_rect.intersection(&own_bounds) {
            for row in clipped.min_y()..clipped.max_y() {
                for col in clipped.min_x()..clipped.max_x() {
                    let dest_idx = (col + row * self.size.width) as usize;
                    let src_idx = ((col - x) + (row - y) * source.size.width) as usize;
                    self.pixels[dest_idx] = source.pixels[src_idx];
                }
            }
        }
    }

    /// Return the pixel value at the given coordinate.
    pub fn get_pixel(&self, x: u32, y: u32) -> u32 {
        self.pixels[(x + y * self.size.width) as usize]
    }
}
/// Builder for Vitral `State` structure.
pub struct Builder<T> {
// User-supplied default font; when `None`, `build` generates the built-in one.
user_font: Option<Rc<FontData<T>>>,
// User-supplied solid texture; when `None`, `build` creates a 1x1 blank one.
user_solid: Option<ImageData<T>>,
}
impl<T> Builder<T>
where T: Clone + Eq
{
/// Create a builder with no user overrides set.
pub fn new() -> Builder<T> {
Builder {
user_font: None,
user_solid: None,
}
}
/// Set a different font as the default font.
pub fn default_font(mut self, font: Rc<FontData<T>>) -> Builder<T> {
self.user_font = Some(font);
self
}
/// Give your own `ImageData` for the solid texture.
///
/// You want to use this if you have an image atlas and you want to have both drawing solid
/// shapes and textured shapes use the same texture resource and go to the same draw batch.
pub fn solid_texture(mut self, solid: ImageData<T>) -> Builder<T> {
self.user_solid = Some(solid);
self
}
// Build the built-in bitmap font: a 96x48 grayscale atlas of 6x8 glyphs,
// 16 columns by 6 rows, covering ASCII 32..126 (127/DEL excluded since
// the range end is exclusive).
fn build_default_font<F>(&self, make_t: &mut F) -> FontData<T>
where F: FnMut(ImageBuffer) -> T
{
static DEFAULT_FONT: &'static [u8] = include_bytes!("font-96x48.raw");
let (char_width, char_height) = (6, 8);
let (width, height) = (char_width * 16, char_height * 6);
let start_char = 32;
let end_char = 127;
let columns = width / char_width;
// Expand each grayscale byte into all four RGBA channels.
let img = ImageBuffer::from_fn(width, height, |x, y| {
let a = DEFAULT_FONT[(x + y * width) as usize] as u32;
(a << 24) | (a << 16) | (a << 8) | a
});
let t = make_t(img);
let mut map = HashMap::new();
for i in start_char..end_char {
// Pixel position of this glyph within the atlas grid.
let x = char_width * ((i - start_char) % columns);
let y = char_height * ((i - start_char) / columns);
// Texture coordinates are normalized to [0, 1] over the atlas.
let tex_coords = Rect::new(Point2D::new(x as f32 / width as f32,
y as f32 / height as f32),
Size2D::new(char_width as f32 / width as f32,
char_height as f32 / height as f32));
map.insert(std::char::from_u32(i).unwrap(),
CharData {
image: ImageData {
texture: t.clone(),
size: Size2D::new(char_width, char_height),
tex_coords: tex_coords,
},
draw_offset: Point2D::new(0.0, 0.0),
advance: char_width as f32,
});
}
FontData {
chars: map,
height: char_height as f32,
}
}
/// Construct an interface context instance.
///
/// Needs to be provided a texture creation function. If the user has not specified them
/// earlier, this will be used to construct a separate texture for the solid color and a
/// default font texture.
pub fn build<F, V>(self, screen_size: Size2D<f32>, mut make_t: F) -> State<T, V>
where F: FnMut(ImageBuffer) -> T
{
// Use the caller-supplied font if one was set, otherwise rasterize the
// built-in one through `make_t`.
let font;
if let Some(user_font) = self.user_font {
font = user_font
} else {
font = Rc::new(self.build_default_font(&mut make_t));
}
// Likewise for the solid texture: fall back to a fresh 1x1 white texture.
let solid;
if let Some(user_solid) = self.user_solid {
solid = user_solid;
} else {
solid = ImageData {
texture: make_t(ImageBuffer::blank()),
size: Size2D::new(1, 1),
tex_coords: Rect::new(Point2D::new(0.0, 0.0), Size2D::new(1.0, 1.0)),
};
}
State::new(solid, screen_size, font)
}
}
/// An immediate mode graphical user interface context.
///
/// The context persists over a frame and receives commands that combine GUI
/// description and input handling. At the end of the frame, the commands are
/// converted into rendering instructions for the GUI.
pub struct State<T, V> {
// Accumulated draw batches for the current frame; drained by `end_frame`.
draw_list: Vec<DrawBatch<T, V>>,
pub layout_pos: Point2D<f32>,
// Last known mouse position, updated by `input_mouse_move`.
mouse_pos: Point2D<f32>,
// Per-button mouse state, indexed by `MouseButton as usize`.
click_state: [ClickState; 3],
// Make this Rc so it can be passed outside without copying and used as a reference in a
// mutable op.
default_font: Rc<FontData<T>>,
// Texture region used for drawing untextured (solid color) shapes.
solid_texture: ImageData<T>,
// Keyboard input collected since the last `end_frame`.
text_input: Vec<KeyInput>,
// Frame counter, incremented by `begin_frame`.
tick: u64,
// Stack of active clipping rectangles; the innermost one is on top.
clip_stack: Vec<Rect<f32>>,
screen_size: Size2D<f32>,
}
impl<T, V> State<T, V>
    where T: Clone + Eq
{
    fn new(solid_texture: ImageData<T>,
           screen_size: Size2D<f32>,
           default_font: Rc<FontData<T>>)
           -> State<T, V> {
        State {
            draw_list: Vec::new(),
            layout_pos: Point2D::new(0.0, 0.0),
            mouse_pos: Point2D::new(0.0, 0.0),
            click_state: [ClickState::Unpressed; 3],
            default_font: default_font,
            solid_texture: solid_texture,
            text_input: Vec::new(),
            tick: 0,
            clip_stack: Vec::new(),
            screen_size: screen_size,
        }
    }

    /// Push vertex into the draw batch, return its index offset.
    ///
    /// Index offsets are guaranteed to be consecutive and ascending as long as the current draw
    /// batch has not been switched, so you can grab the return value from the first `vertex_push`
    /// and express the rest by adding offsets to it.
    #[inline(always)]
    fn push_vertex(&mut self, vtx: V) -> u16 {
        // A batch must already have been opened via `check_batch`.
        let batch = self.draw_list.last_mut().unwrap();
        let idx_offset = batch.vertices.len() as u16;
        batch.vertices.push(vtx);
        idx_offset
    }

    #[inline(always)]
    fn push_triangle(&mut self, i1: u16, i2: u16, i3: u16) {
        let batch = self.draw_list.last_mut().unwrap();
        batch.triangle_indices.extend_from_slice(&[i1, i2, i3]);
    }

    /// Push a clipping rectangle into the clip stack.
    fn push_clip_rect(&mut self, area: Rect<f32>) {
        self.clip_stack.push(area);
        // Clip state affects batching, so the current batch may need to end.
        self.check_batch(None);
    }

    /// Pop the last clipping rectangle from the clip stack.
    ///
    /// The clip stack must have had at least one rectangle added with `push_clip_rect`.
    fn pop_clip_rect(&mut self) -> Rect<f32> {
        self.clip_stack.pop().expect("Popping an empty clip stack")
    }

    /// Return current clip rectangle, if any.
    fn clip_rect(&self) -> Option<Rect<f32>> {
        self.clip_stack.last().cloned()
    }

    fn start_solid_texture(&mut self) {
        let texture = self.solid_texture.texture.clone();
        self.start_texture(texture);
    }

    fn solid_texture_texcoord(&self) -> Point2D<f32> {
        self.solid_texture.tex_coords.origin
    }

    /// Ensure that the current draw batch uses the given texture.
    fn start_texture(&mut self, texture: T) {
        self.check_batch(Some(texture));
    }

    fn current_batch_is_invalid(&self, texture: T) -> bool {
        match self.draw_list.last() {
            // No batch open at all.
            None => true,
            Some(batch) => {
                // Texture switch, clip switch, or getting too close to the
                // u16 index limit for comfort.
                batch.texture != texture || batch.clip != self.clip_rect() ||
                batch.vertices.len() > (1 << 15)
            }
        }
    }

    /// Start a new render batch if needed.
    ///
    /// Need to start a new batch if render state has changed or if the current one is growing too
    /// large for the u16 indices.
    fn check_batch(&mut self, texture_needed: Option<T>) {
        if texture_needed.is_none() && self.draw_list.is_empty() {
            // Do nothing for stuff that only affects ongoing drawing.
            return;
        }
        // Either the explicitly requested texture or whatever the open batch uses.
        let texture = match texture_needed {
            Some(t) => t,
            None => self.draw_list.last().unwrap().texture.clone(),
        };
        let clip = self.clip_rect();
        if self.current_batch_is_invalid(texture.clone()) {
            self.draw_list.push(DrawBatch {
                texture: texture,
                clip: clip,
                vertices: Vec::new(),
                triangle_indices: Vec::new(),
            });
        }
    }
}
/// Command interface for a Vitral GUI.
pub trait Context: Sized {
/// Texture handle type, chosen by the rendering backend.
type T: Clone + Eq;
/// Vertex type, chosen by the rendering backend.
type V;
/// Return the internal GUI state.
///
/// This is mostly intended for other trait methods, not for direct use.
fn state<'a>(&'a self) -> &'a State<Self::T, Self::V>;
/// Return mutable reference to the internal GUI state.
///
/// This is mostly intended for other trait methods, not for direct use.
fn state_mut<'a>(&'a mut self) -> &'a mut State<Self::T, Self::V>;
/// Construct a new vertex.
///
/// Properties other than position and texture coordinate are provided by the implementation.
fn new_vertex(&mut self,
pos: Point2D<f32>,
tex_coord: Point2D<f32>,
color: [f32; 4])
-> Self::V;
/// Return reference to the currently active font.
fn current_font<'a>(&'a mut self) -> Rc<FontData<Self::T>> {
self.state().default_font.clone()
}
/// Push a vertex into the current batch without any unit conversion or
/// coordinate transformation.
fn push_raw_vertex(&mut self, vertex: Self::V) -> u16 {
self.state_mut().push_vertex(vertex)
}
/// Convert a point to pixel space, transform it to global space, and push
/// the resulting vertex; returns its index offset in the current batch.
fn push_vertex<U: ConvertibleUnit>(&mut self,
pos: TypedPoint2D<f32, U>,
tex_coord: Point2D<f32>,
color: [f32; 4])
-> u16 {
let pos = ConvertibleUnit::convert_point(&self.scale_factor(), pos);
// NB: Transform is called on incoming vertices here, if any other place is pushing
// vertices to the underlying state, make sure they go through `transform` as well.
let pos = self.transform(pos);
let v = self.new_vertex(pos, tex_coord, color);
self.state_mut().push_vertex(v)
}
/// Transform point from the space of this context to global space.
fn transform(&self, in_pos: Point2D<f32>) -> Point2D<f32> {
in_pos
}
/// Draw a solid-colored line of the given thickness between two points.
fn draw_line<U: ConvertibleUnit>(&mut self,
thickness: f32,
color: [f32; 4],
p1: TypedPoint2D<f32, U>,
p2: TypedPoint2D<f32, U>) {
// A zero-length line has no direction, so draw nothing.
if p1 == p2 {
return;
}
// Convert to screen space here because before applying thickness so that thickness will
// always be in pixel units.
let p1 = ConvertibleUnit::convert_point(&self.scale_factor(), p1);
let p2 = ConvertibleUnit::convert_point(&self.scale_factor(), p2);
self.state_mut().start_solid_texture();
let t = self.state().solid_texture_texcoord();
// Displacements from the one-dimensional base line.
// Normalize to unit length, then scale to half the line thickness.
let mut front = p2 - p1;
front = front / front.dot(front).sqrt() * (thickness / 2.0);
// Perpendicular of `front`, same length.
let side = TypedPoint2D::new(-front.y, front.x);
// Four corners of the line quad. NOTE(review): the (0.5, 0.5) offset
// presumably aligns to pixel centers — confirm against the backend's
// rasterization rules.
let q1 = p1 - side - front + Point2D::new(0.5, 0.5);
let q2 = p1 + side - front + Point2D::new(0.5, 0.5);
let q3 = p2 + side + front + Point2D::new(0.5, 0.5);
let q4 = p2 - side + front + Point2D::new(0.5, 0.5);
let idx = self.push_vertex(q1, t, color);
self.push_vertex(q2, t, color);
self.push_vertex(q3, t, color);
self.push_vertex(q4, t, color);
// Two triangles covering the quad.
self.state_mut().push_triangle(idx, idx + 1, idx + 2);
self.state_mut().push_triangle(idx, idx + 2, idx + 3);
}
/// Draw a textured rectangle as two triangles.
fn draw_tex_rect<U: ConvertibleUnit>(&mut self,
area: TypedRect<f32, U>,
tex_coords: Rect<f32>,
color: [f32; 4]) {
let idx = self.push_vertex(area.origin, tex_coords.origin, color);
self.push_vertex(area.top_right(), tex_coords.top_right(), color);
self.push_vertex(area.bottom_right(), tex_coords.bottom_right(), color);
self.push_vertex(area.bottom_left(), tex_coords.bottom_left(), color);
self.state_mut().push_triangle(idx, idx + 1, idx + 2);
self.state_mut().push_triangle(idx, idx + 2, idx + 3);
}
/// Fill a rectangle with a solid color.
fn fill_rect<U: ConvertibleUnit>(&mut self, area: TypedRect<f32, U>, color: [f32; 4]) {
self.state_mut().start_solid_texture();
// NOTE(review): this samples a zero-size tex rect at (0, 0) rather than
// `solid_texture_texcoord()` as `draw_line` does — confirm the solid
// texel really sits at the texture origin in all configurations.
self.draw_tex_rect(area,
Rect::new(Point2D::new(0.0, 0.0), Size2D::new(0.0, 0.0)),
color);
}
/// Draw an image at the given position, tinted with `color`.
fn draw_image<U: ConvertibleUnit>(&mut self,
image: &ImageData<Self::T>,
pos: TypedPoint2D<f32, U>,
color: [f32; 4]) {
let pos = ConvertibleUnit::convert_point(&self.scale_factor(), pos);
self.state_mut().start_texture(image.texture.clone());
let size = Size2D::new(image.size.width as f32, image.size.height as f32);
self.draw_tex_rect(Rect::new(pos, size), image.tex_coords, color);
}
/// Draw a line of text to screen.
///
/// The `align` parameter indicates whether pos is interpreted as top left, top middle or top
/// right position of the string.
///
/// The return value is the position for the next line.
fn draw_text<U: ConvertibleUnit>(&mut self,
pos: TypedPoint2D<f32, U>,
align: Align,
color: [f32; 4],
text: &str)
-> TypedPoint2D<f32, U> {
// Convert to pixel space here, because font offsetting will operate in pixel space.
let mut pixel_pos = ConvertibleUnit::convert_point(&self.scale_factor(), pos);
// Shift left by 0, half, or full string width depending on alignment.
pixel_pos.x -= match align {
Align::Left => 0.0,
Align::Center => self.current_font().str_width(text) / 2.0,
Align::Right => self.current_font().str_width(text),
};
for c in text.chars() {
// XXX: Gratuitous cloning because of borrow checker.
let x = self.current_font().chars.get(&c).cloned();
// TODO: Draw some sort of symbol for characters missing from font.
if let Some(f) = x {
self.draw_image(&f.image, pixel_pos - f.draw_offset, color);
pixel_pos.x += f.advance;
}
}
// Advance one line height downward, converted back to the caller's unit.
let (_, delta) = U::from_pixel_scale(&self.scale_factor(), 0.0, self.current_font().height);
TypedPoint2D::new(pos.x, pos.y + delta)
}
/// Return the mouse input state for the current bounds area.
fn click_state(&self) -> ButtonAction {
let is_hovering = self.global_bounds().contains(&self.mouse_pos());
// A press only counts if the cursor is inside this context's bounds.
let left_press = self.state().click_state[MouseButton::Left as usize].is_pressed() &&
is_hovering;
let right_press = self.state().click_state[MouseButton::Right as usize].is_pressed() &&
is_hovering;
let is_pressed = left_press || right_press;
// Determine the return value.
if left_press && self.state().click_state[MouseButton::Left as usize].is_release() {
ButtonAction::LeftClicked
} else if right_press && self.state().click_state[MouseButton::Right as usize].is_release() {
ButtonAction::RightClicked
} else if is_pressed {
ButtonAction::Pressed
} else if is_hovering {
ButtonAction::Hover
} else {
ButtonAction::Inert
}
}
/// Draw a button in the current bounds
fn button(&mut self, caption: &str) -> ButtonAction {
let ret = self.click_state();
// Choose color.
// TODO: Way to parametrize UI colors in style data.
let color = match ret {
ButtonAction::Pressed => [1.0, 1.0, 0.0, 1.0],
ButtonAction::Hover => [0.5, 1.0, 0.0, 1.0],
_ => [0.0, 1.0, 0.0, 1.0],
};
// Draw button in current bounds.
// Filled outer rect plus a black inset gives a 1-pixel border.
let area = self.bounds();
self.fill_rect(area, color);
self.fill_rect(area.inflate(-1.0, -1.0), [0.0, 0.0, 0.0, 1.0]);
// Vertically center the caption.
let mut pos = ConvertibleUnit::convert_point(&self.scale_factor(),
PropPoint2D::new(0.5, 0.0));
pos.y = (self.bounds().size.height - self.current_font().height) / 2.0;
self.draw_text(pos, Align::Center, color, caption);
ret
}
/// Advance the frame counter; call once at the start of every frame.
fn begin_frame(&mut self) {
self.state_mut().tick += 1;
}
/// Finish the frame and hand the accumulated draw batches to the caller.
fn end_frame(&mut self) -> Vec<DrawBatch<Self::T, Self::V>> {
// Clean up transient mouse click info.
for i in 0..3 {
self.state_mut().click_state[i] = self.state().click_state[i].tick();
}
// Clean up text buffer
self.state_mut().text_input.clear();
// Move the draw list out, leaving an empty one for the next frame.
let mut ret = Vec::new();
mem::swap(&mut ret, &mut self.state_mut().draw_list);
ret
}
/// Create a sub-context restricted to `area`, without clipping.
fn bound<'a, U: ConvertibleUnit>(&'a mut self, area: TypedRect<f32, U>) -> Bounds<'a, Self> {
let mut area = ConvertibleUnit::convert_rect(&self.scale_factor(), area);
area.origin = self.transform(area.origin);
Bounds {
parent: self,
area: area,
is_clipped: false,
}
}
/// Create a sub-context restricted to `area`, with clipping; the clip is
/// released when the returned `Bounds` is dropped.
fn bound_clipped<'a, U: ConvertibleUnit>(&'a mut self,
area: TypedRect<f32, U>)
-> Bounds<'a, Self> {
let mut area = ConvertibleUnit::convert_rect(&self.scale_factor(), area);
area.origin = self.transform(area.origin);
self.state_mut().push_clip_rect(area);
Bounds {
parent: self,
area: area,
is_clipped: true,
}
}
/// Get the local space bounds rectangle of this context.
fn bounds(&self) -> Rect<f32> {
Rect::new(Point2D::new(0.0, 0.0), self.state().screen_size)
}
/// Get the global space bounds rectangle of this context.
fn global_bounds(&self) -> Rect<f32> {
let mut ret = self.bounds();
ret.origin = self.transform(ret.origin);
ret
}
/// Scale used when converting proportional units; the local bounds size.
fn scale_factor(&self) -> Size2D<f32> {
self.bounds().size
}
/// Get the mouse cursor position in global space.
fn mouse_pos(&self) -> Point2D<f32> {
self.state().mouse_pos
}
/// Register mouse button state.
fn input_mouse_button(&mut self, id: MouseButton, is_down: bool) {
if is_down {
self.state_mut().click_state[id as usize] = self.state().click_state[id as usize]
.input_press(self.mouse_pos());
} else {
self.state_mut().click_state[id as usize] = self.state().click_state[id as usize]
.input_release(self.mouse_pos());
}
}
/// Register mouse motion.
fn input_mouse_move(&mut self, x: i32, y: i32) {
self.state_mut().mouse_pos = Point2D::new(x as f32, y as f32);
}
/// Register printable character input.
fn input_char(&mut self, c: char) {
self.state_mut().text_input.push(KeyInput::Printable(c));
}
/// Register a nonprintable key state.
fn input_key_state(&mut self, k: Keycode, is_down: bool) {
// Only key-down events are buffered; releases are ignored.
if is_down {
self.state_mut().text_input.push(KeyInput::Other(k));
}
}
/// Draw an editable text field and apply this frame's buffered key input
/// to `text_buffer`.
fn text_input(&mut self, color: [f32; 4], text_buffer: &mut String) {
// TODO: Focus system. Only accept input if current input widget is focused.
// (Also needs widget identifiers to know which is which.)
for c in &self.state().text_input {
match *c {
KeyInput::Printable(c) => {
// Ignore control characters below space.
if c >= ' ' {
text_buffer.push(c);
}
}
KeyInput::Other(Keycode::Backspace) => {
text_buffer.pop();
}
KeyInput::Other(_) => {}
}
}
// TODO: Option to draw cursor mid-string (font may be
// variable-width...), track cursor pos somehow (external ref or
// internal cache)
// TODO: Arrow keys move cursor
// TODO: Filter function for input, eg. numbers only.
// Nasty hack to show a blinking cursor. Will only work for cursor
// always at the end of the input.
if ((time::precise_time_s() * 3.0) % 3.0) as u32 == 0 {
self.draw_text(Point2D::new(0.0, 0.0), Align::Left, color, text_buffer);
} else {
self.draw_text(Point2D::new(0.0, 0.0),
Align::Left,
color,
&format!("{}_", text_buffer));
}
}
}
/// A sub-context that restricts drawing to a rectangular area of its parent.
pub struct Bounds<'a, C: Context + 'a> {
parent: &'a mut C,
// Area in the parent's global space; used as the local origin offset.
area: Rect<f32>,
// When true, a clip rect was pushed on creation and is popped on drop.
is_clipped: bool,
}
impl<'a, C: Context> Context for Bounds<'a, C> {
type T = C::T;
type V = C::V;
// State access delegates straight to the parent context.
fn state<'b>(&'b self) -> &'b State<Self::T, Self::V> {
self.parent.state()
}
fn state_mut<'b>(&'b mut self) -> &'b mut State<Self::T, Self::V> {
self.parent.state_mut()
}
fn new_vertex(&mut self,
pos: Point2D<f32>,
tex_coord: Point2D<f32>,
color: [f32; 4])
-> Self::V {
self.parent.new_vertex(pos, tex_coord, color)
}
// Local-to-global: offset by this bounds' origin, then apply the parent's
// own transform so nested bounds compose.
fn transform(&self, in_pos: Point2D<f32>) -> Point2D<f32> {
self.parent.transform(in_pos + self.area.origin)
}
// Local bounds start at the origin and span this sub-area's size.
fn bounds(&self) -> Rect<f32> {
Rect::new(Point2D::new(0.0, 0.0), self.area.size)
}
}
impl<'a, C: Context> Drop for Bounds<'a, C> {
fn drop(&mut self) {
// If this is a clipping bounds context, remove the clip when going out of scope.
// Pairs with the push_clip_rect done in `Context::bound_clipped`.
if self.is_clipped {
self.state_mut().pop_clip_rect();
}
}
}
/// A sequence of primitive draw operarations.
pub struct DrawBatch<T, V> {
/// Texture used for the current batch, details depend on backend
/// implementation
pub texture: T,
/// Clipping rectangle for the current batch
pub clip: Option<Rect<f32>>,
/// Vertex data
pub vertices: Vec<V>,
/// Indices into the vertex array for the triangles that make up the batch
/// (three consecutive `u16` indices per triangle).
pub triangle_indices: Vec<u16>,
}
/// Text alignment.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum Align {
/// Anchor text at its left edge.
Left,
/// Anchor text at its horizontal center.
Center,
/// Anchor text at its right edge.
Right,
}
/// Mouse button identifier.
///
/// The discriminant doubles as an index into `State::click_state`.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum MouseButton {
Left,
Middle,
Right,
}
/// Mouse click state.
#[derive(Copy, Clone, PartialEq, Debug)]
enum ClickState {
/// No button held.
Unpressed,
/// Button went down this frame at the given position.
Press(Point2D<f32>),
/// Button is held down; the point is where the press started.
Drag(Point2D<f32>),
/// Button was released this frame; (press position, release position).
Release(Point2D<f32>, Point2D<f32>),
}
impl ClickState {
    /// Advance one frame: transient Press/Release states decay into their
    /// steady-state counterparts (Drag and Unpressed respectively).
    fn tick(self) -> ClickState {
        match self {
            ClickState::Press(p) |
            ClickState::Drag(p) => ClickState::Drag(p),
            ClickState::Unpressed |
            ClickState::Release(..) => ClickState::Unpressed,
        }
    }

    /// Register a button-down event at `pos`.
    fn input_press(self, pos: Point2D<f32>) -> ClickState {
        match self {
            ClickState::Press(p) |
            ClickState::Drag(p) => ClickState::Drag(p),
            ClickState::Unpressed |
            ClickState::Release(..) => ClickState::Press(pos),
        }
    }

    /// Register a button-up event at `pos`, keeping the original press position.
    fn input_release(self, pos: Point2D<f32>) -> ClickState {
        match self {
            ClickState::Press(p) |
            ClickState::Drag(p) |
            ClickState::Release(p, _) => ClickState::Release(p, pos),
            ClickState::Unpressed => ClickState::Unpressed,
        }
    }

    /// True for any state where the button is or was just held down.
    fn is_pressed(&self) -> bool {
        match *self {
            ClickState::Press(..) |
            ClickState::Drag(..) |
            ClickState::Release(..) => true,
            ClickState::Unpressed => false,
        }
    }

    /// True only in the frame the button was released.
    fn is_release(&self) -> bool {
        match *self {
            ClickState::Release(..) => true,
            ClickState::Unpressed |
            ClickState::Press(..) |
            ClickState::Drag(..) => false,
        }
    }
}
/// Identifiers for nonprintable keys used in text editing widgets.
///
/// Of these, only `Backspace` is currently acted on by `Context::text_input`.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum Keycode {
Tab,
Shift,
Ctrl,
Enter,
Backspace,
Del,
Up,
Down,
Left,
Right,
}
// A single buffered keyboard event for the text input widget.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum KeyInput {
/// A printable character, as delivered by `Context::input_char`.
Printable(char),
/// A nonprintable key, as delivered by `Context::input_key_state`.
Other(Keycode),
}
/// Font data for Vitral.
#[derive(Clone)]
pub struct FontData<T> {
/// Map from chars to glyph images.
pub chars: HashMap<char, CharData<T>>,
/// Line height for this font.
pub height: f32,
}
impl<T> FontData<T> {
    /// Return the size of a string of text in this font.
    pub fn render_size(&self, text: &str) -> Rect<f32> {
        // Sum the advances of the glyphs the font actually contains.
        let width: f32 = text.chars()
            .filter_map(|c| self.chars.get(&c))
            .map(|glyph| glyph.advance)
            .sum();
        Rect::new(Point2D::new(0.0, 0.0), Size2D::new(width, self.height))
    }

    /// Return the width of a char in the font.
    pub fn char_width(&self, c: char) -> Option<f32> {
        self.chars.get(&c).map(|glyph| glyph.advance)
    }

    /// Total advance width of `s`; characters missing from the font count as zero.
    pub fn str_width(&self, s: &str) -> f32 {
        s.chars().fold(0.0, |acc, c| acc + self.char_width(c).unwrap_or(0.0))
    }
}
/// Drawable image data for Vitral.
#[derive(Clone, PartialEq)]
pub struct CharData<T> {
// The glyph image.
pub image: ImageData<T>,
// Offset subtracted from the pen position when drawing the glyph.
pub draw_offset: Point2D<f32>,
// Horizontal pen advance after drawing this glyph.
pub advance: f32,
}
/// Drawable image data for Vitral.
#[derive(Clone, PartialEq)]
pub struct ImageData<T> {
// Backend texture handle.
pub texture: T,
// Image dimensions in pixels.
pub size: Size2D<u32>,
// Normalized texture coordinates of the image within its texture.
pub tex_coords: Rect<f32>,
}
/// Action on a GUI button.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum ButtonAction {
    /// No interaction with the widget.
    Inert,
    /// Cursor is over the widget but no button is down.
    Hover,
    /// A mouse button is held down over the widget.
    Pressed,
    /// The widget was activated with the left mouse button this frame.
    LeftClicked,
    /// The widget was activated with the right mouse button this frame.
    RightClicked,
}

impl ButtonAction {
    /// Convenience check for a left-button activation.
    pub fn left_clicked(&self) -> bool {
        *self == ButtonAction::LeftClicked
    }

    /// Convenience check for a right-button activation.
    pub fn right_clicked(&self) -> bool {
        *self == ButtonAction::RightClicked
    }
}
/// Unit type for `euclid` primitives for representing proportional units in [0.0, 1.0].
pub struct ProportionalUnit;
/// Explicit unit type for pixel units, treated the same as `euclid::UnknownUnit`.
/// See the `ConvertibleUnit` impls below.
pub struct PixelUnit;
/// A coordinate unit that can be converted to and from pixel units.
pub trait ConvertibleUnit: Sized {
/// Per-axis multipliers that convert this unit into pixels.
fn scale_factor(scale: &Size2D<f32>) -> (f32, f32);
/// Convert an (x, y) pair in this unit into pixel units.
fn to_pixel_scale(scale: &Size2D<f32>, x: f32, y: f32) -> (f32, f32) {
let (w, h) = Self::scale_factor(scale);
(x * w, y * h)
}
/// Convert an (x, y) pair in pixel units back into this unit.
fn from_pixel_scale(scale: &Size2D<f32>, x: f32, y: f32) -> (f32, f32) {
let (w, h) = Self::scale_factor(scale);
(x / w, y / h)
}
/// Convert a rectangle in this unit into a pixel-unit rectangle.
fn convert_rect(scale: &Size2D<f32>, rect: TypedRect<f32, Self>) -> Rect<f32> {
Rect::new(Self::convert_point(scale, rect.origin),
Self::convert_size(scale, rect.size))
}
/// Convert a point in this unit into a pixel-unit point.
fn convert_point(scale: &Size2D<f32>, point: TypedPoint2D<f32, Self>) -> Point2D<f32> {
let (x, y) = Self::to_pixel_scale(scale, point.x, point.y);
Point2D::new(x, y)
}
/// Convert a size in this unit into a pixel-unit size.
fn convert_size(scale: &Size2D<f32>, size: TypedSize2D<f32, Self>) -> Size2D<f32> {
let (width, height) = Self::to_pixel_scale(scale, size.width, size.height);
Size2D::new(width, height)
}
}
// Pixel-like units pass coordinates through unchanged.
impl ConvertibleUnit for euclid::UnknownUnit {
fn scale_factor(_: &Size2D<f32>) -> (f32, f32) {
(1.0, 1.0)
}
}
impl ConvertibleUnit for PixelUnit {
fn scale_factor(_: &Size2D<f32>) -> (f32, f32) {
(1.0, 1.0)
}
}
// Proportional units map [0.0, 1.0] onto the full extent of `scale`.
impl ConvertibleUnit for ProportionalUnit {
fn scale_factor(scale: &Size2D<f32>) -> (f32, f32) {
(scale.width, scale.height)
}
}
/// Alias for proportional unit point type.
pub type PropPoint2D = TypedPoint2D<f32, ProportionalUnit>;
/// Alias for proportional unit size type.
pub type PropSize2D = TypedSize2D<f32, ProportionalUnit>;
/// Alias for proportional unit rectangle type.
/// Coordinates in these types live in [0.0, 1.0] of the context's bounds.
pub type PropRect = TypedRect<f32, ProportionalUnit>;
Make low-level mesh ops public
extern crate euclid;
extern crate time;
use std::mem;
use std::iter;
use std::collections::HashMap;
use std::rc::Rc;
use euclid::{Point2D, Rect, Size2D};
use euclid::{TypedPoint2D, TypedRect, TypedSize2D};
/// Drawable image data for Vitral.
#[derive(Clone, PartialEq)]
pub struct ImageData<T> {
// Backend texture handle.
pub texture: T,
// Image dimensions in pixels.
pub size: Size2D<u32>,
// Normalized texture coordinates of the image within its texture.
pub tex_coords: Rect<f32>,
}
/// Simple 32-bit image container.
///
/// The pixel data structure is RGBA.
#[derive(Clone)]
pub struct ImageBuffer {
/// Image size.
pub size: Size2D<u32>,
/// RGBA pixels, in rows from top left down, len must be width * height.
/// Each pixel is packed into a single `u32`.
pub pixels: Vec<u32>,
}
impl ImageBuffer {
/// Build an empty buffer.
pub fn new(width: u32, height: u32) -> ImageBuffer {
ImageBuffer {
size: Size2D::new(width, height),
pixels: iter::repeat(0u32).take((width * height) as usize).collect(),
}
}
/// Build the buffer from a function.
pub fn from_fn<F>(width: u32, height: u32, f: F) -> ImageBuffer
where F: Fn(u32, u32) -> u32
{
// The flat pixel index is mapped back to (x, y) for the generator.
let pixels = (0..)
.take((width * height) as usize)
.map(|i| f(i % width, i / width))
.collect();
ImageBuffer {
size: Size2D::new(width, height),
pixels: pixels,
}
}
/// Build the buffer from RGBA pixel iterator.
pub fn from_iter<I>(width: u32, height: u32, pixels: &mut I) -> ImageBuffer
where I: Iterator<Item = u32>
{
ImageBuffer {
size: Size2D::new(width, height),
pixels: pixels.take((width * height) as usize).collect(),
}
}
/// Copy all pixels from source buffer to self starting at given coordinates in self.
pub fn copy_from(&mut self, source: &ImageBuffer, x: u32, y: u32) {
let blit_rect = Rect::new(Point2D::new(x, y), source.size);
// Clip the copy region against our own bounds before touching pixels.
if let Some(blit_rect) = blit_rect.intersection(&Rect::new(Point2D::new(0, 0), self.size)) {
for y2 in blit_rect.min_y()..blit_rect.max_y() {
for x2 in blit_rect.min_x()..blit_rect.max_x() {
let self_idx = (x2 + y2 * self.size.width) as usize;
let source_idx = ((x2 - x) + (y2 - y) * source.size.width) as usize;
self.pixels[self_idx] = source.pixels[source_idx];
}
}
}
}
/// Return the pixel value at the given coordinate.
pub fn get_pixel(&self, x: u32, y: u32) -> u32 {
self.pixels[(x + y * self.size.width) as usize]
}
}
/// Builder for Vitral `State` structure.
pub struct Builder<T> {
    // User-provided default font; when `None`, the built-in bitmap font is generated.
    user_font: Option<Rc<FontData<T>>>,
    // User-provided solid texture; when `None`, a 1x1 white texture is created.
    user_solid: Option<ImageData<T>>,
}
impl<T> Builder<T>
    where T: Clone + Eq
{
    /// Create a builder with no custom font or solid texture configured.
    pub fn new() -> Builder<T> {
        Builder {
            user_font: None,
            user_solid: None,
        }
    }

    /// Set a different font as the default font.
    pub fn default_font(mut self, font: Rc<FontData<T>>) -> Builder<T> {
        self.user_font = Some(font);
        self
    }

    /// Give your own `ImageData` for the solid texture.
    ///
    /// You want to use this if you have an image atlas and you want to have both drawing solid
    /// shapes and textured shapes use the same texture resource and go to the same draw batch.
    pub fn solid_texture(mut self, solid: ImageData<T>) -> Builder<T> {
        self.user_solid = Some(solid);
        self
    }

    /// Build the built-in bitmap font, uploading its atlas texture via `make_t`.
    fn build_default_font<F>(&self, make_t: &mut F) -> FontData<T>
        where F: FnMut(ImageBuffer) -> T
    {
        // Raw 8-bit font bitmap: a 16x6 grid of 6x8 pixel glyph cells.
        static DEFAULT_FONT: &'static [u8] = include_bytes!("font-96x48.raw");
        let (char_width, char_height) = (6, 8);
        let (width, height) = (char_width * 16, char_height * 6);
        // Glyphs cover ' ' (32) up to but not including DEL (127).
        let start_char = 32;
        let end_char = 127;
        let columns = width / char_width;
        // Expand each 8-bit sample into a grayscale RGBA pixel (same value in all channels).
        let img = ImageBuffer::from_fn(width, height, |x, y| {
            let a = DEFAULT_FONT[(x + y * width) as usize] as u32;
            (a << 24) | (a << 16) | (a << 8) | a
        });
        let t = make_t(img);
        let mut map = HashMap::new();
        for i in start_char..end_char {
            // Pixel position of this glyph's cell in the atlas.
            let x = char_width * ((i - start_char) % columns);
            let y = char_height * ((i - start_char) / columns);
            // Normalized texture coordinates of the cell.
            let tex_coords = Rect::new(Point2D::new(x as f32 / width as f32,
                                                    y as f32 / height as f32),
                                       Size2D::new(char_width as f32 / width as f32,
                                                   char_height as f32 / height as f32));
            map.insert(std::char::from_u32(i).unwrap(),
                       CharData {
                           image: ImageData {
                               texture: t.clone(),
                               size: Size2D::new(char_width, char_height),
                               tex_coords: tex_coords,
                           },
                           draw_offset: Point2D::new(0.0, 0.0),
                           advance: char_width as f32,
                       });
        }
        FontData {
            chars: map,
            height: char_height as f32,
        }
    }

    /// Construct an interface context instance.
    ///
    /// Needs to be provided a texture creation function. If the user has not specified them
    /// earlier, this will be used to construct a separate texture for the solid color and a
    /// default font texture.
    pub fn build<F, V>(self, screen_size: Size2D<f32>, mut make_t: F) -> State<T, V>
        where F: FnMut(ImageBuffer) -> T
    {
        let font;
        if let Some(user_font) = self.user_font {
            font = user_font
        } else {
            font = Rc::new(self.build_default_font(&mut make_t));
        }
        let solid;
        if let Some(user_solid) = self.user_solid {
            solid = user_solid;
        } else {
            // Default solid texture: a single opaque white pixel.
            solid = ImageData {
                texture: make_t(ImageBuffer::from_fn(1, 1, |_, _| 0xffffffff)),
                size: Size2D::new(1, 1),
                tex_coords: Rect::new(Point2D::new(0.0, 0.0), Size2D::new(1.0, 1.0)),
            };
        }
        State::new(solid, screen_size, font)
    }
}
/// An immediate mode graphical user interface context.
///
/// The context persists over a frame and receives commands that combine GUI
/// description and input handling. At the end of the frame, the commands are
/// converted into rendering instructions for the GUI.
pub struct State<T, V> {
    // Draw batches accumulated during the current frame.
    draw_list: Vec<DrawBatch<T, V>>,
    /// Current layout cursor position.
    pub layout_pos: Point2D<f32>,
    // Latest known mouse position.
    mouse_pos: Point2D<f32>,
    // Per-button click state, indexed by `MouseButton as usize`.
    click_state: [ClickState; 3],
    // Make this Rc so it can be passed outside without copying and used as a reference in a
    // mutable op.
    default_font: Rc<FontData<T>>,
    // Texture used when drawing plain colored shapes.
    solid_texture: ImageData<T>,
    // Key/character events buffered since the last frame end.
    text_input: Vec<KeyInput>,
    // Frame counter, advanced by `begin_frame`.
    tick: u64,
    // Stack of active clip rectangles; the innermost one is on top.
    clip_stack: Vec<Rect<f32>>,
    // Size of the whole screen in pixels.
    screen_size: Size2D<f32>,
}
impl<T, V> State<T, V>
    where T: Clone + Eq
{
    /// Create a new GUI state; called through `Builder::build`.
    fn new(solid_texture: ImageData<T>,
           screen_size: Size2D<f32>,
           default_font: Rc<FontData<T>>)
           -> State<T, V> {
        State {
            draw_list: Vec::new(),
            layout_pos: Point2D::new(0.0, 0.0),
            mouse_pos: Point2D::new(0.0, 0.0),
            click_state: [ClickState::Unpressed, ClickState::Unpressed, ClickState::Unpressed],
            default_font: default_font,
            solid_texture: solid_texture,
            text_input: Vec::new(),
            tick: 0,
            clip_stack: Vec::new(),
            screen_size: screen_size,
        }
    }

    /// Push vertex into the draw batch, return its index offset.
    ///
    /// Index offsets are guaranteed to be consecutive and ascending as long as the current draw
    /// batch has not been switched, so you can grab the return value from the first `vertex_push`
    /// and express the rest by adding offsets to it.
    #[inline(always)]
    pub fn push_vertex(&mut self, vtx: V) -> u16 {
        // A batch must have been started via `start_texture` before pushing geometry.
        let batch = self.draw_list.last_mut().expect("No draw batch started");
        let idx_offset = batch.vertices.len() as u16;
        batch.vertices.push(vtx);
        idx_offset
    }

    /// Push one triangle (three vertex index offsets) into the current draw batch.
    #[inline(always)]
    pub fn push_triangle(&mut self, i1: u16, i2: u16, i3: u16) {
        let batch = self.draw_list.last_mut().expect("No draw batch started");
        batch.triangle_indices.push(i1);
        batch.triangle_indices.push(i2);
        batch.triangle_indices.push(i3);
    }

    /// Push a clipping rectangle into the clip stack.
    fn push_clip_rect(&mut self, area: Rect<f32>) {
        self.clip_stack.push(area);
        // Clip state changed, the current batch may no longer be valid.
        self.check_batch(None);
    }

    /// Pop the last clipping rectangle from the clip stack.
    ///
    /// The clip stack must have had at least one rectangle added with `push_clip_rect`.
    fn pop_clip_rect(&mut self) -> Rect<f32> {
        self.clip_stack.pop().expect("Popping an empty clip stack")
    }

    /// Return current clip rectangle, if any.
    fn clip_rect(&self) -> Option<Rect<f32>> {
        self.clip_stack.last().cloned()
    }

    /// Switch to the solid texture, used for drawing plain colored shapes.
    pub fn start_solid_texture(&mut self) {
        let t = self.solid_texture.texture.clone();
        self.start_texture(t);
    }

    /// Texture coordinate that samples the solid texture.
    fn solid_texture_texcoord(&self) -> Point2D<f32> {
        self.solid_texture.tex_coords.origin
    }

    /// Start drawing with `texture`, opening a new batch if necessary.
    pub fn start_texture(&mut self, texture: T) {
        self.check_batch(Some(texture));
    }

    /// True when a new batch must be started for drawing with `texture`.
    fn current_batch_is_invalid(&self, texture: &T) -> bool {
        match self.draw_list.last() {
            None => true,
            Some(batch) => {
                batch.texture != *texture || batch.clip != self.clip_rect()
                    // Getting too close to u16 limit for comfort.
                    || batch.vertices.len() > (1 << 15)
            }
        }
    }

    /// Start a new render batch if needed.
    ///
    /// Need to start a new batch if render state has changed or if the current one is growing too
    /// large for the u16 indices.
    fn check_batch(&mut self, texture_needed: Option<T>) {
        if texture_needed.is_none() && self.draw_list.is_empty() {
            // Do nothing for stuff that only affects ongoing drawing.
            return;
        }
        // No explicit texture requested: keep using the current batch's texture.
        let texture = texture_needed.unwrap_or_else(|| {
            self.draw_list[self.draw_list.len() - 1].texture.clone()
        });
        let clip = self.clip_rect();
        // Borrow the texture for the check instead of cloning it a second time.
        if self.current_batch_is_invalid(&texture) {
            self.draw_list.push(DrawBatch {
                texture: texture,
                clip: clip,
                vertices: Vec::new(),
                triangle_indices: Vec::new(),
            });
        }
    }
}
/// Command interface for a Vitral GUI.
pub trait Context: Sized {
    /// Backend-specific texture handle type.
    type T: Clone + Eq;
    /// Backend-specific vertex type.
    type V;

    /// Return the internal GUI state.
    ///
    /// This is mostly intended for other trait methods, not for direct use.
    fn state<'a>(&'a self) -> &'a State<Self::T, Self::V>;

    /// Return mutable reference to the internal GUI state.
    ///
    /// This is mostly intended for other trait methods, not for direct use.
    fn state_mut<'a>(&'a mut self) -> &'a mut State<Self::T, Self::V>;

    /// Construct a new vertex.
    ///
    /// Properties other than position and texture coordinate are provided by the implementation.
    fn new_vertex(&mut self,
                  pos: Point2D<f32>,
                  tex_coord: Point2D<f32>,
                  color: [f32; 4])
                  -> Self::V;

    /// Return reference to the currently active font.
    fn current_font<'a>(&'a mut self) -> Rc<FontData<Self::T>> {
        self.state().default_font.clone()
    }

    /// Convert `pos` to pixel space, transform to global space, and push the vertex.
    fn push_vertex<U: ConvertibleUnit>(&mut self,
                                       pos: TypedPoint2D<f32, U>,
                                       tex_coord: Point2D<f32>,
                                       color: [f32; 4])
                                       -> u16 {
        let pos = ConvertibleUnit::convert_point(&self.scale_factor(), pos);
        // NB: Transform is called on incoming vertices here, if any other place is pushing
        // vertices to the underlying state, make sure they go through `transform` as well.
        let pos = self.transform(pos);
        let v = self.new_vertex(pos, tex_coord, color);
        self.state_mut().push_vertex(v)
    }

    /// Transform point from the space of this context to global space.
    fn transform(&self, in_pos: Point2D<f32>) -> Point2D<f32> {
        in_pos
    }

    /// Draw a solid-colored line with the given pixel thickness.
    fn draw_line<U: ConvertibleUnit>(&mut self,
                                     thickness: f32,
                                     color: [f32; 4],
                                     p1: TypedPoint2D<f32, U>,
                                     p2: TypedPoint2D<f32, U>) {
        // A zero-length line has no direction vector, so draw nothing.
        if p1 == p2 {
            return;
        }
        // Convert to screen space here because before applying thickness so that thickness will
        // always be in pixel units.
        let p1 = ConvertibleUnit::convert_point(&self.scale_factor(), p1);
        let p2 = ConvertibleUnit::convert_point(&self.scale_factor(), p2);
        self.state_mut().start_solid_texture();
        let t = self.state().solid_texture_texcoord();
        // Displacements from the one-dimensional base line.
        let mut front = p2 - p1;
        // Normalize `front` and scale it to half the line thickness.
        front = front / front.dot(front).sqrt() * (thickness / 2.0);
        // Perpendicular displacement giving the line its width.
        let side = TypedPoint2D::new(-front.y, front.x);
        // NOTE(review): the 0.5 offsets presumably align the quad with pixel centers — confirm.
        let q1 = p1 - side - front + Point2D::new(0.5, 0.5);
        let q2 = p1 + side - front + Point2D::new(0.5, 0.5);
        let q3 = p2 + side + front + Point2D::new(0.5, 0.5);
        let q4 = p2 - side + front + Point2D::new(0.5, 0.5);
        // Two triangles covering the quad q1..q4.
        let idx = self.push_vertex(q1, t, color);
        self.push_vertex(q2, t, color);
        self.push_vertex(q3, t, color);
        self.push_vertex(q4, t, color);
        self.state_mut().push_triangle(idx, idx + 1, idx + 2);
        self.state_mut().push_triangle(idx, idx + 2, idx + 3);
    }

    /// Draw a textured rectangle with the given normalized texture coordinates.
    fn draw_tex_rect<U: ConvertibleUnit>(&mut self,
                                         area: TypedRect<f32, U>,
                                         tex_coords: Rect<f32>,
                                         color: [f32; 4]) {
        let idx = self.push_vertex(area.origin, tex_coords.origin, color);
        self.push_vertex(area.top_right(), tex_coords.top_right(), color);
        self.push_vertex(area.bottom_right(), tex_coords.bottom_right(), color);
        self.push_vertex(area.bottom_left(), tex_coords.bottom_left(), color);
        self.state_mut().push_triangle(idx, idx + 1, idx + 2);
        self.state_mut().push_triangle(idx, idx + 2, idx + 3);
    }

    /// Fill `area` with a solid color.
    fn fill_rect<U: ConvertibleUnit>(&mut self, area: TypedRect<f32, U>, color: [f32; 4]) {
        self.state_mut().start_solid_texture();
        // NOTE(review): this samples texcoord (0, 0) instead of
        // `solid_texture_texcoord()` like `draw_line` does; assumes the solid
        // texture sits at the texture origin — confirm against atlas usage.
        self.draw_tex_rect(area,
                           Rect::new(Point2D::new(0.0, 0.0), Size2D::new(0.0, 0.0)),
                           color);
    }

    /// Draw `image` with its top left corner at `pos`, at its native pixel size.
    fn draw_image<U: ConvertibleUnit>(&mut self,
                                      image: &ImageData<Self::T>,
                                      pos: TypedPoint2D<f32, U>,
                                      color: [f32; 4]) {
        let pos = ConvertibleUnit::convert_point(&self.scale_factor(), pos);
        self.state_mut().start_texture(image.texture.clone());
        let size = Size2D::new(image.size.width as f32, image.size.height as f32);
        self.draw_tex_rect(Rect::new(pos, size), image.tex_coords, color);
    }

    /// Draw a line of text to screen.
    ///
    /// The `align` parameter indicates whether pos is interpreted as top left, top middle or top
    /// right position of the string.
    ///
    /// The return value is the position for the next line.
    fn draw_text<U: ConvertibleUnit>(&mut self,
                                     pos: TypedPoint2D<f32, U>,
                                     align: Align,
                                     color: [f32; 4],
                                     text: &str)
                                     -> TypedPoint2D<f32, U> {
        // Convert to pixel space here, because font offsetting will operate in pixel space.
        let mut pixel_pos = ConvertibleUnit::convert_point(&self.scale_factor(), pos);
        // Shift left by zero, half or the full string width depending on alignment.
        pixel_pos.x -= match align {
            Align::Left => 0.0,
            Align::Center => self.current_font().str_width(text) / 2.0,
            Align::Right => self.current_font().str_width(text),
        };
        for c in text.chars() {
            // XXX: Gratuitous cloning because of borrow checker.
            let x = self.current_font().chars.get(&c).cloned();
            // TODO: Draw some sort of symbol for characters missing from font.
            if let Some(f) = x {
                self.draw_image(&f.image, pixel_pos - f.draw_offset, color);
                pixel_pos.x += f.advance;
            }
        }
        // Convert the font's pixel line height back into the caller's unit.
        let (_, delta) = U::from_pixel_scale(&self.scale_factor(), 0.0, self.current_font().height);
        TypedPoint2D::new(pos.x, pos.y + delta)
    }

    /// Return the mouse input state for the current bounds area.
    fn click_state(&self) -> ButtonAction {
        let is_hovering = self.global_bounds().contains(&self.mouse_pos());
        // A press only counts while the pointer is inside this context's bounds.
        let left_press = self.state().click_state[MouseButton::Left as usize].is_pressed() &&
                         is_hovering;
        let right_press = self.state().click_state[MouseButton::Right as usize].is_pressed() &&
                          is_hovering;
        let is_pressed = left_press || right_press;
        // Determine the return value.
        if left_press && self.state().click_state[MouseButton::Left as usize].is_release() {
            ButtonAction::LeftClicked
        } else if right_press && self.state().click_state[MouseButton::Right as usize].is_release() {
            ButtonAction::RightClicked
        } else if is_pressed {
            ButtonAction::Pressed
        } else if is_hovering {
            ButtonAction::Hover
        } else {
            ButtonAction::Inert
        }
    }

    /// Draw a button in the current bounds
    fn button(&mut self, caption: &str) -> ButtonAction {
        let ret = self.click_state();
        // Choose color.
        // TODO: Way to parametrize UI colors in style data.
        let color = match ret {
            ButtonAction::Pressed => [1.0, 1.0, 0.0, 1.0],
            ButtonAction::Hover => [0.5, 1.0, 0.0, 1.0],
            _ => [0.0, 1.0, 0.0, 1.0],
        };
        // Draw button in current bounds.
        let area = self.bounds();
        self.fill_rect(area, color);
        // Inner fill one pixel smaller, leaving a colored one-pixel border.
        self.fill_rect(area.inflate(-1.0, -1.0), [0.0, 0.0, 0.0, 1.0]);
        // Vertically center the caption.
        let mut pos = ConvertibleUnit::convert_point(&self.scale_factor(),
                                                     PropPoint2D::new(0.5, 0.0));
        pos.y = (self.bounds().size.height - self.current_font().height) / 2.0;
        self.draw_text(pos, Align::Center, color, caption);
        ret
    }

    /// Begin a new frame, advancing the frame counter.
    fn begin_frame(&mut self) {
        self.state_mut().tick += 1;
    }

    /// End the frame and return the draw batches accumulated during it.
    fn end_frame(&mut self) -> Vec<DrawBatch<Self::T, Self::V>> {
        // Clean up transient mouse click info.
        for i in 0..3 {
            self.state_mut().click_state[i] = self.state().click_state[i].tick();
        }
        // Clean up text buffer
        self.state_mut().text_input.clear();
        // Move the batch list out, leaving an empty list for the next frame.
        let mut ret = Vec::new();
        mem::swap(&mut ret, &mut self.state_mut().draw_list);
        ret
    }

    /// Create a sub-context restricted to `area` (without clipping).
    fn bound<'a, U: ConvertibleUnit>(&'a mut self, area: TypedRect<f32, U>) -> Bounds<'a, Self> {
        let mut area = ConvertibleUnit::convert_rect(&self.scale_factor(), area);
        area.origin = self.transform(area.origin);
        Bounds {
            parent: self,
            area: area,
            is_clipped: false,
        }
    }

    /// Create a sub-context restricted to `area` that also clips drawing to it.
    fn bound_clipped<'a, U: ConvertibleUnit>(&'a mut self,
                                             area: TypedRect<f32, U>)
                                             -> Bounds<'a, Self> {
        let mut area = ConvertibleUnit::convert_rect(&self.scale_factor(), area);
        area.origin = self.transform(area.origin);
        // The matching pop happens in `Bounds::drop`.
        self.state_mut().push_clip_rect(area);
        Bounds {
            parent: self,
            area: area,
            is_clipped: true,
        }
    }

    /// Get the local space bounds rectangle of this context.
    fn bounds(&self) -> Rect<f32> {
        Rect::new(Point2D::new(0.0, 0.0), self.state().screen_size)
    }

    /// Get the global space bounds rectangle of this context.
    fn global_bounds(&self) -> Rect<f32> {
        let mut ret = self.bounds();
        ret.origin = self.transform(ret.origin);
        ret
    }

    /// Scale used for proportional units: the size of this context's bounds.
    fn scale_factor(&self) -> Size2D<f32> {
        self.bounds().size
    }

    /// Get the mouse cursor position in global space.
    fn mouse_pos(&self) -> Point2D<f32> {
        self.state().mouse_pos
    }

    /// Register mouse button state.
    fn input_mouse_button(&mut self, id: MouseButton, is_down: bool) {
        if is_down {
            self.state_mut().click_state[id as usize] = self.state().click_state[id as usize]
                .input_press(self.mouse_pos());
        } else {
            self.state_mut().click_state[id as usize] = self.state().click_state[id as usize]
                .input_release(self.mouse_pos());
        }
    }

    /// Register mouse motion.
    fn input_mouse_move(&mut self, x: i32, y: i32) {
        self.state_mut().mouse_pos = Point2D::new(x as f32, y as f32);
    }

    /// Register printable character input.
    fn input_char(&mut self, c: char) {
        self.state_mut().text_input.push(KeyInput::Printable(c));
    }

    /// Register a nonprintable key state.
    fn input_key_state(&mut self, k: Keycode, is_down: bool) {
        if is_down {
            self.state_mut().text_input.push(KeyInput::Other(k));
        }
    }

    /// Draw a text input widget, applying buffered key input to `text_buffer`.
    fn text_input(&mut self, color: [f32; 4], text_buffer: &mut String) {
        // TODO: Focus system. Only accept input if current input widget is focused.
        // (Also needs widget identifiers to know which is which.)
        for c in &self.state().text_input {
            match *c {
                KeyInput::Printable(c) => {
                    // Only append printable characters (space and above).
                    if c >= ' ' {
                        text_buffer.push(c);
                    }
                }
                KeyInput::Other(Keycode::Backspace) => {
                    text_buffer.pop();
                }
                KeyInput::Other(_) => {}
            }
        }
        // TODO: Option to draw cursor mid-string (font may be
        // variable-width...), track cursor pos somehow (external ref or
        // internal cache)
        // TODO: Arrow keys move cursor
        // TODO: Filter function for input, eg. numbers only.
        // Nasty hack to show a blinking cursor. Will only work for cursor
        // always at the end of the input.
        if ((time::precise_time_s() * 3.0) % 3.0) as u32 == 0 {
            self.draw_text(Point2D::new(0.0, 0.0), Align::Left, color, text_buffer);
        } else {
            self.draw_text(Point2D::new(0.0, 0.0),
                           Align::Left,
                           color,
                           &format!("{}_", text_buffer));
        }
    }
}
/// A sub-context that restricts drawing to a rectangular area of its parent.
pub struct Bounds<'a, C: Context + 'a> {
    // The context this bounds was carved out of.
    parent: &'a mut C,
    // The bounded area, stored in the parent's global space.
    area: Rect<f32>,
    // True when a clip rect was pushed for this bounds; popped again on drop.
    is_clipped: bool,
}
impl<'a, C: Context> Context for Bounds<'a, C> {
    type T = C::T;
    type V = C::V;

    // State access is delegated straight to the parent context.
    fn state<'b>(&'b self) -> &'b State<Self::T, Self::V> {
        self.parent.state()
    }

    fn state_mut<'b>(&'b mut self) -> &'b mut State<Self::T, Self::V> {
        self.parent.state_mut()
    }

    fn new_vertex(&mut self,
                  pos: Point2D<f32>,
                  tex_coord: Point2D<f32>,
                  color: [f32; 4])
                  -> Self::V {
        self.parent.new_vertex(pos, tex_coord, color)
    }

    // Offset local coordinates by this bounds' origin, then apply the parent's transform.
    fn transform(&self, in_pos: Point2D<f32>) -> Point2D<f32> {
        self.parent.transform(in_pos + self.area.origin)
    }

    // Local space starts at (0, 0) and spans this bounds' area.
    fn bounds(&self) -> Rect<f32> {
        Rect::new(Point2D::new(0.0, 0.0), self.area.size)
    }
}
impl<'a, C: Context> Drop for Bounds<'a, C> {
    fn drop(&mut self) {
        // If this is a clipping bounds context, remove the clip when going out of scope.
        // This keeps push/pop of clip rects balanced without caller bookkeeping.
        if self.is_clipped {
            self.state_mut().pop_clip_rect();
        }
    }
}
/// A sequence of primitive draw operations.
///
/// Triangle indices are `u16`, which bounds how many vertices one batch can hold.
pub struct DrawBatch<T, V> {
    /// Texture used for the current batch, details depend on backend
    /// implementation
    pub texture: T,
    /// Clipping rectangle for the current batch
    pub clip: Option<Rect<f32>>,
    /// Vertex data
    pub vertices: Vec<V>,
    /// Indices into the vertex array for the triangles that make up the batch
    pub triangle_indices: Vec<u16>,
}
/// Text alignment.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum Align {
    /// Anchor text at its left edge.
    Left,
    /// Anchor text at its horizontal center.
    Center,
    /// Anchor text at its right edge.
    Right,
}

/// Mouse button identifier.
///
/// The discriminant is used to index the per-button click state array.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum MouseButton {
    Left,
    Middle,
    Right,
}
/// Mouse click state.
#[derive(Copy, Clone, PartialEq, Debug)]
enum ClickState {
    /// Button is up.
    Unpressed,
    /// Button went down this frame, at the given position.
    Press(Point2D<f32>),
    /// Button is held down; remembers where the press started.
    Drag(Point2D<f32>),
    /// Button came up this frame; carries press position and release position.
    Release(Point2D<f32>, Point2D<f32>),
}

impl ClickState {
    /// Advance one frame: presses turn into drags, releases reset to unpressed.
    fn tick(self) -> ClickState {
        match self {
            ClickState::Press(start) |
            ClickState::Drag(start) => ClickState::Drag(start),
            ClickState::Unpressed |
            ClickState::Release(_, _) => ClickState::Unpressed,
        }
    }

    /// Register a press at `pos`; a button already held keeps its original press point.
    fn input_press(self, pos: Point2D<f32>) -> ClickState {
        match self {
            ClickState::Press(start) |
            ClickState::Drag(start) => ClickState::Drag(start),
            ClickState::Unpressed |
            ClickState::Release(_, _) => ClickState::Press(pos),
        }
    }

    /// Register a release at `pos`, keeping the original press point.
    fn input_release(self, pos: Point2D<f32>) -> ClickState {
        match self {
            ClickState::Unpressed => ClickState::Unpressed,
            ClickState::Press(start) |
            ClickState::Drag(start) |
            ClickState::Release(start, _) => ClickState::Release(start, pos),
        }
    }

    /// True while the button is held or was released this frame.
    fn is_pressed(&self) -> bool {
        match *self {
            ClickState::Unpressed => false,
            ClickState::Press(_) |
            ClickState::Drag(_) |
            ClickState::Release(_, _) => true,
        }
    }

    /// True only on the frame the button was released.
    fn is_release(&self) -> bool {
        match *self {
            ClickState::Release(_, _) => true,
            _ => false,
        }
    }
}
/// Identifiers for nonprintable keys used in text editing widgets.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum Keycode {
    Tab,
    Shift,
    Ctrl,
    Enter,
    Backspace,
    Del,
    Up,
    Down,
    Left,
    Right,
}

/// One unit of buffered keyboard input: a printable character or a special key.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum KeyInput {
    Printable(char),
    Other(Keycode),
}
/// Font data for Vitral.
///
/// Generic over the backend texture handle type `T`.
#[derive(Clone)]
pub struct FontData<T> {
    /// Map from chars to glyph images.
    pub chars: HashMap<char, CharData<T>>,
    /// Line height for this font.
    pub height: f32,
}
impl<T> FontData<T> {
    /// Return the size of a string of text in this font.
    ///
    /// The rectangle sits at the origin; width is the summed advances,
    /// height is the font's line height.
    pub fn render_size(&self, text: &str) -> Rect<f32> {
        // Reuse `str_width` rather than duplicating the advance-summing loop.
        Rect::new(Point2D::new(0.0, 0.0),
                  Size2D::new(self.str_width(text), self.height))
    }

    /// Return the width of a char in the font, or `None` if it has no glyph.
    pub fn char_width(&self, c: char) -> Option<f32> {
        self.chars.get(&c).map(|c| c.advance)
    }

    /// Total advance width of `s`; characters missing from the font count as zero.
    pub fn str_width(&self, s: &str) -> f32 {
        s.chars().map(|c| self.char_width(c).unwrap_or(0.0)).sum()
    }
}
/// Per-character drawing data for one font glyph.
#[derive(Clone, PartialEq)]
pub struct CharData<T> {
    /// Glyph image for this character.
    pub image: ImageData<T>,
    /// Offset subtracted from the cursor position when drawing the glyph image.
    pub draw_offset: Point2D<f32>,
    /// How far the cursor advances after this character.
    pub advance: f32,
}
/// Action on a GUI button.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum ButtonAction {
    /// Mouse is not interacting with the button.
    Inert,
    /// Mouse hovers over the button without pressing.
    Hover,
    /// A mouse button is held down over the button.
    Pressed,
    /// The button was left-clicked this frame.
    LeftClicked,
    /// The button was right-clicked this frame.
    RightClicked,
}

impl ButtonAction {
    /// True when the action is a completed left click.
    pub fn left_clicked(&self) -> bool {
        *self == ButtonAction::LeftClicked
    }

    /// True when the action is a completed right click.
    pub fn right_clicked(&self) -> bool {
        *self == ButtonAction::RightClicked
    }
}
/// Unit type for `euclid` primitives for representing proportional units in [0.0, 1.0].
///
/// Coordinates are multiplied by the current context's size when converted to pixels,
/// so (1.0, 1.0) maps to the bottom right of the context.
pub struct ProportionalUnit;

/// Explicit unit type for pixel units, treated the same as `euclid::UnknownUnit`.
pub struct PixelUnit;
/// Unit types that know how to convert their coordinates into pixel space.
pub trait ConvertibleUnit: Sized {
    /// Horizontal and vertical multipliers that map this unit to pixels.
    fn scale_factor(scale: &Size2D<f32>) -> (f32, f32);

    /// Convert an (x, y) pair in this unit to pixel units.
    fn to_pixel_scale(scale: &Size2D<f32>, x: f32, y: f32) -> (f32, f32) {
        let (sx, sy) = Self::scale_factor(scale);
        (x * sx, y * sy)
    }

    /// Convert an (x, y) pair in pixel units back to this unit.
    fn from_pixel_scale(scale: &Size2D<f32>, x: f32, y: f32) -> (f32, f32) {
        let (sx, sy) = Self::scale_factor(scale);
        (x / sx, y / sy)
    }

    /// Convert a typed rectangle to an untyped pixel-space rectangle.
    fn convert_rect(scale: &Size2D<f32>, rect: TypedRect<f32, Self>) -> Rect<f32> {
        let origin = Self::convert_point(scale, rect.origin);
        let size = Self::convert_size(scale, rect.size);
        Rect::new(origin, size)
    }

    /// Convert a typed point to an untyped pixel-space point.
    fn convert_point(scale: &Size2D<f32>, point: TypedPoint2D<f32, Self>) -> Point2D<f32> {
        let (x, y) = Self::to_pixel_scale(scale, point.x, point.y);
        Point2D::new(x, y)
    }

    /// Convert a typed size to an untyped pixel-space size.
    fn convert_size(scale: &Size2D<f32>, size: TypedSize2D<f32, Self>) -> Size2D<f32> {
        let (w, h) = Self::to_pixel_scale(scale, size.width, size.height);
        Size2D::new(w, h)
    }
}
// Plain untyped values are taken to already be in pixel units.
impl ConvertibleUnit for euclid::UnknownUnit {
    fn scale_factor(_: &Size2D<f32>) -> (f32, f32) {
        (1.0, 1.0)
    }
}

// Explicit pixel units map to pixels one-to-one.
impl ConvertibleUnit for PixelUnit {
    fn scale_factor(_: &Size2D<f32>) -> (f32, f32) {
        (1.0, 1.0)
    }
}

// Proportional units scale by the context's size, so 1.0 spans the whole area.
impl ConvertibleUnit for ProportionalUnit {
    fn scale_factor(scale: &Size2D<f32>) -> (f32, f32) {
        (scale.width, scale.height)
    }
}
/// Alias for proportional unit point type; (1.0, 1.0) is the context's bottom right.
pub type PropPoint2D = TypedPoint2D<f32, ProportionalUnit>;
/// Alias for proportional unit size type; (1.0, 1.0) covers the whole context.
pub type PropSize2D = TypedSize2D<f32, ProportionalUnit>;
/// Alias for proportional unit rectangle type.
pub type PropRect = TypedRect<f32, ProportionalUnit>;
|
#[macro_use] extern crate matches;
pub extern crate mime;
use std::io;
/// Errors from `DataUrl::process`.
pub enum DataUrlError {
    /// The input does not start with a (case-insensitive) "data:" scheme.
    NotADataUrl,
    /// The URL has no comma separating the header from the body.
    NoComma,
}
/// A parsed `data:` URL, borrowing from the input string.
pub struct DataUrl<'a> {
    // MIME type from the header, with the spec default applied when missing/invalid.
    mime_type: mime::Mime,
    // True when the header carried a ";base64" suffix.
    base64: bool,
    // The still-encoded body, possibly followed by a "#fragment" tail.
    encoded_body_plus_fragment: &'a str,
}

/// Opaque error: the body contained invalid base64.
pub struct Base64Error(());
impl<'a> DataUrl<'a> {
    /// <https://fetch.spec.whatwg.org/#data-url-processor>
    /// but starting from a string rather than a Url, to avoid extra string copies.
    pub fn process(input: &'a str) -> Result<Self, DataUrlError> {
        use DataUrlError::*;
        let after_colon = pretend_parse_data_url(input).ok_or(NotADataUrl)?;
        let (from_colon_to_comma, encoded_body_plus_fragment) =
            find_comma_before_fragment(after_colon).ok_or(NoComma)?;
        let (mime_type, base64) = parse_header(from_colon_to_comma);
        Ok(DataUrl { mime_type, base64, encoded_body_plus_fragment })
    }

    /// The MIME type parsed from the URL's header.
    pub fn mime_type(&self) -> &mime::Mime {
        &self.mime_type
    }

    /// Streaming-decode the data URL’s body to `sink`.
    ///
    /// Errors while writing to the sink are propagated.
    /// Invalid base64 causes an error with `e.kind() == ErrorKind::InvalidData`.
    /// When decoding without error, the URL’s fragment identifier is returned if it has one.
    ///
    /// The fragment identifier is represented as in the origin input.
    /// It needs to be either percent-encoded to obtain the same string as in a parsed URL,
    /// or percent-decoded to interpret it as text.
    pub fn decode_body<W>(&self, sink: W) -> io::Result<Result<Option<&'a str>, Base64Error>>
        where W: io::Write
    {
        if self.base64 {
            decode_with_base64(self.encoded_body_plus_fragment, sink)
        } else {
            decode_without_base64(self.encoded_body_plus_fragment, sink).map(Ok)
        }
    }

    /// Decode the body into a fresh `Vec<u8>`, also returning the fragment if any.
    pub fn decode_body_to_vec(&self) -> Result<(Vec<u8>, Option<&str>), Base64Error> {
        let mut sink = Vec::new();
        // Writing to a `Vec<u8>` is infallible; a base64 problem is reported
        // through the inner `Result`, not the outer `io::Result`, so this
        // cannot panic in practice.
        let base64_result = self.decode_body(&mut sink)
            .expect("writing to a Vec<u8> cannot fail");
        let url_fragment = base64_result?;
        Ok((sink, url_fragment))
    }
}
// Early-return `None` from an `Option`-returning function when `$condition` is false.
macro_rules! require {
    ($condition: expr) => {
        if !$condition {
            return None
        }
    }
}
/// Similar to <https://url.spec.whatwg.org/#concept-basic-url-parser>
/// followed by <https://url.spec.whatwg.org/#concept-url-serializer>
///
/// * `None`: not a data URL.
///
/// * `Some(s)`: sort of the result of serialization, except:
///
///   - `data:` prefix removed
///   - The fragment is included
///   - Other components are **not** UTF-8 percent-encoded
///   - ASCII tabs and newlines in the middle are **not** removed
fn pretend_parse_data_url(input: &str) -> Option<&str> {
    // Trim C0 control or space
    let left_trimmed = input.trim_left_matches(|ch| ch <= ' ');

    let mut bytes = left_trimmed.bytes();
    {
        // Ignore ASCII tabs or newlines
        let mut iter = bytes.by_ref().filter(|&byte| !matches!(byte, b'\t' | b'\n' | b'\r'));
        // Expect the scheme "data:", matched case-insensitively byte by byte.
        require!(iter.next()?.to_ascii_lowercase() == b'd');
        require!(iter.next()?.to_ascii_lowercase() == b'a');
        require!(iter.next()?.to_ascii_lowercase() == b't');
        require!(iter.next()?.to_ascii_lowercase() == b'a');
        require!(iter.next()? == b':');
    }
    // `bytes` was advanced through the filter above, so the drop in its length is
    // exactly how many input bytes the scheme consumed (including skipped tabs/newlines).
    let bytes_consumed = left_trimmed.len() - bytes.len();
    let after_colon = &left_trimmed[bytes_consumed..];

    // Trim C0 control or space
    Some(after_colon.trim_right_matches(|ch| ch <= ' '))
}
/// Split `after_colon` at the first comma, provided it occurs before any '#'.
///
/// Returns the header (before the comma) and the body-plus-fragment (after it),
/// or `None` when no comma precedes the fragment.
fn find_comma_before_fragment(after_colon: &str) -> Option<(&str, &str)> {
    for (index, byte) in after_colon.bytes().enumerate() {
        match byte {
            b',' => return Some((&after_colon[..index], &after_colon[index + 1..])),
            // A '#' starts the fragment; a comma after it does not count.
            b'#' => return None,
            _ => {}
        }
    }
    None
}
/// Parse the part of the URL between "data:" and the comma: an optional MIME type,
/// optionally followed by ";base64". Returns the MIME type (falling back to the
/// spec default "text/plain;charset=US-ASCII") and whether the base64 flag was set.
fn parse_header(from_colon_to_comma: &str) -> (mime::Mime, bool) {
    let input = from_colon_to_comma.chars()
        .filter(|&c| !matches!(c, '\t' | '\n' | '\r')) // Removed by the URL parser
        .collect::<String>();

    // Declared before the re-bound `input` below so it lives long enough
    // for `input` to borrow from it in the ';'-prefix branch.
    let mut string;
    let input = input.trim_matches(' ');
    let (mut input, base64) = match without_base64_suffix(input) {
        Some(s) => (s, true),
        None => (input, false),
    };

    // FIXME: percent-encode
    // A header starting with ';' has parameters but no type;
    // the spec supplies "text/plain" in that case.
    if input.starts_with(';') {
        string = String::from("text/plain");
        string.push_str(input);
        input = &*string;
    }

    // FIXME: does Mime::from_str match the MIME Sniffing Standard’s parsing algorithm?
    // <https://mimesniff.spec.whatwg.org/#parse-a-mime-type>
    let mime_type = input.parse()
        .unwrap_or_else(|_| "text/plain;charset=US-ASCII".parse().unwrap());
    (mime_type, base64)
}
/// Strip a trailing ";base64" marker (case-insensitive, with optional spaces
/// between the ';' and "base64"). `None` means there was no base64 suffix.
fn without_base64_suffix(s: &str) -> Option<&str> {
    let before_keyword = remove_suffix(s, "base64", str::eq_ignore_ascii_case)?;
    remove_suffix(before_keyword.trim_right_matches(' '), ";", str::eq)
}
/// Strip `needle` from the end of `haystack` when it matches under `eq`,
/// returning the remaining prefix, or `None` when it does not match.
fn remove_suffix<'a, Eq>(haystack: &'a str, needle: &str, eq: Eq) -> Option<&'a str>
    where Eq: Fn(&str, &str) -> bool
{
    if needle.len() > haystack.len() {
        return None;
    }
    let split = haystack.len() - needle.len();
    if eq(&haystack[split..], needle) {
        Some(&haystack[..split])
    } else {
        None
    }
}
/// This is <https://url.spec.whatwg.org/#string-percent-decode> while also:
///
/// * Ignoring ASCII tab or newlines
/// * Stopping at the first '#' (which indicates the start of the fragment)
///
/// Anything that would have been UTF-8 percent-encoded by the URL parser
/// would be percent-decoded here.
/// We skip that round-trip and pass it through unchanged.
fn decode_without_base64<W>(encoded_body_plus_fragment: &str, mut sink: W)
                            -> io::Result<Option<&str>>
    where W: io::Write
{
    let bytes = encoded_body_plus_fragment.as_bytes();
    // Start of the current run of bytes that can be passed through verbatim.
    let mut slice_start = 0;
    for (i, &byte) in bytes.iter().enumerate() {
        // We only need to look for 5 different "special" byte values.
        // For everything else we make slices as large as possible, borrowing the input,
        // in order to make fewer write_all() calls.
        if matches!(byte, b'%' | b'#' | b'\t' | b'\n' | b'\r') {
            // Write everything (if anything) "non-special" we’ve accumulated
            // before this special byte
            if i > slice_start {
                sink.write_all(&bytes[slice_start..i])?;
            }
            // Then deal with the special byte.
            match byte {
                b'%' => {
                    // High nibble at i + 1, low nibble at i + 2.
                    let l = bytes.get(i + 2).and_then(|&b| (b as char).to_digit(16));
                    let h = bytes.get(i + 1).and_then(|&b| (b as char).to_digit(16));
                    if let (Some(h), Some(l)) = (h, l) {
                        // '%' followed by two ASCII hex digits
                        let one_byte = h as u8 * 0x10 + l as u8;
                        sink.write_all(&[one_byte])?;
                        // Skip over the '%' and both digits in the next slice.
                        slice_start = i + 3;
                    } else {
                        // Do nothing. Leave slice_start unchanged.
                        // The % sign will be part of the next slice.
                    }
                }
                b'#' => {
                    // Everything after '#' is the fragment; stop decoding here.
                    let fragment_start = i + 1;
                    return Ok(Some(&encoded_body_plus_fragment[fragment_start..]))
                }
                // Ignore over '\t' | '\n' | '\r'
                _ => slice_start = i + 1
            }
        }
    }
    // Flush the trailing run of plain bytes.
    sink.write_all(&bytes[slice_start..])?;
    Ok(None)
}
/// `decode_without_base64()` composed with
/// <https://infra.spec.whatwg.org/#isomorphic-decode> composed with
/// <https://infra.spec.whatwg.org/#forgiving-base64-decode>.
fn decode_with_base64<W>(encoded_body_plus_fragment: &str, sink: W)
                         -> io::Result<Result<Option<&str>, Base64Error>>
    where W: io::Write
{
    // Percent-decoded bytes are streamed through the base64 decoder into `sink`.
    let mut decoder = Base64Decoder {
        sink,
        bit_buffer: 0,
        buffer_bit_length: 0,
        padding_symbols: 0,
        base64_error: false,
    };
    let result = decode_without_base64(encoded_body_plus_fragment, &mut decoder);
    // Check the base64 flag before `?`: a decoder-raised InvalidData error must be
    // reported as a Base64Error, not as an I/O error.
    if decoder.base64_error {
        return Ok(Err(Base64Error(())))
    }
    let fragment = result?;

    // Handle the leftover symbols that did not form a full group of four.
    match (decoder.buffer_bit_length, decoder.padding_symbols) {
        (0, 0) => {
            // A multiple of four of alphabet symbols, and nothing else.
        }
        (12, 2) | (12, 0) => {
            // A multiple of four of alphabet symbols, followed by two more symbols,
            // optionally followed by two padding characters (which make a total multiple of four).
            // 12 buffered bits hold one full byte in their top 8 bits.
            let byte_buffer = [
                (decoder.bit_buffer >> 4) as u8,
            ];
            decoder.sink.write_all(&byte_buffer)?;
        }
        (18, 1) | (18, 0) => {
            // A multiple of four of alphabet symbols, followed by three more symbols,
            // optionally followed by one padding character (which make a total multiple of four).
            // 18 buffered bits hold two full bytes in their top 16 bits.
            let byte_buffer = [
                (decoder.bit_buffer >> 10) as u8,
                (decoder.bit_buffer >> 2) as u8,
            ];
            decoder.sink.write_all(&byte_buffer)?;
        }
        _ => {
            // No other combination is acceptable
            return Ok(Err(Base64Error(())))
        }
    }
    Ok(Ok(fragment))
}
/// Streaming forgiving-base64 decoder that writes decoded bytes into `sink`.
struct Base64Decoder<W> {
    // Destination for decoded bytes.
    sink: W,
    // Accumulator for 6-bit symbol values, newest symbol in the low bits.
    bit_buffer: u32,
    // How many meaningful bits are currently in `bit_buffer`.
    buffer_bit_length: u8,
    // Number of '=' padding characters seen so far.
    padding_symbols: u8,
    // Set when invalid base64 was encountered; checked by `decode_with_base64`.
    base64_error: bool,
}
impl<W> io::Write for Base64Decoder<W> where W: io::Write {
    // The decoder is only driven through `write_all`; the plain `Write`
    // methods are intentionally unimplemented.
    fn write(&mut self, _buf: &[u8]) -> io::Result<usize> { unimplemented!() }
    fn flush(&mut self) -> io::Result<()> { unimplemented!() }

    /// Feed base64 text to the decoder, writing decoded bytes to the inner sink.
    ///
    /// On invalid input, sets `self.base64_error` and returns `ErrorKind::InvalidData`.
    /// Leftover bits (fewer than four symbols) stay buffered for
    /// `decode_with_base64` to finalize.
    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        for &byte in buf.iter() {
            let value = BASE64_DECODE_TABLE[byte as usize];
            if value < 0 {
                // A character that’s not part of the alphabet
                // Remove ASCII whitespace
                // '\t' | '\n' | '\r' was already filtered by decode_without_base64()
                if byte == b' ' || byte == b'\x0C' {
                    continue
                }
                if byte == b'=' {
                    // BUG FIX: count each '=' once (was saturating_add(8));
                    // `decode_with_base64` compares the total against 1 and 2.
                    self.padding_symbols = self.padding_symbols.saturating_add(1);
                    continue
                }
                self.base64_error = true;
                Err(io::ErrorKind::InvalidData)?
            }
            if self.padding_symbols > 0 {
                // Alphabet symbols after padding
                self.base64_error = true;
                Err(io::ErrorKind::InvalidData)?
            }
            self.bit_buffer <<= 6;
            self.bit_buffer |= value as u32;
            // BUG FIX: flush on the fourth symbol (18 bits buffered before this
            // one); the previous `< 24` check never flushed a full quartet and
            // left `buffer_bit_length` at 24, which `decode_with_base64`
            // rejects — valid input like "QQQQ" would fail.
            if self.buffer_bit_length < 18 {
                self.buffer_bit_length += 6;
            } else {
                // We’ve accumulated four times 6 bits, which equals three times 8 bits.
                let byte_buffer = [
                    (self.bit_buffer >> 16) as u8,
                    (self.bit_buffer >> 8) as u8,
                    self.bit_buffer as u8,
                ];
                self.sink.write_all(&byte_buffer)?;
                self.buffer_bit_length = 0;
                // No need to reset self.bit_buffer,
                // since next time we’re only gonna read relevant bits.
            }
        }
        Ok(())
    }
}
/// Generated by `make_base64_decode_table.py` based on "Table 1: The Base 64 Alphabet"
/// at <https://tools.ietf.org/html/rfc4648#section-4>
///
/// Array indices are the byte value of symbols.
/// Array values are their positions in the base64 alphabet,
/// or -1 for symbols not in the alphabet.
/// The position contributes 6 bits to the decoded bytes.
// '+' (0x2B) is 62 and '/' (0x2F) is 63; digits then upper- and lowercase
// letters fill positions 52-61, 0-25 and 26-51 respectively.
const BASE64_DECODE_TABLE: [i8; 256] = [
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 62, -1, -1, -1, 63,
    52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1, -1, -1, -1, -1, -1,
    -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
    15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -1, -1, -1, -1, -1,
    -1, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
    41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
];
Use a closure and a generic error type rather than io::Write
#[macro_use] extern crate matches;
pub extern crate mime;
/// Errors returned when processing a string into a `DataUrl`.
///
/// `Debug` is derived so the type works with `Result::unwrap`/`expect`
/// and `{:?}` logging, as expected of a public error type.
#[derive(Debug)]
pub enum DataUrlError {
    /// The input does not start (after trimming) with a case-insensitive `data:` prefix.
    NotADataUrl,
    /// No comma found before the fragment, so the URL has no body.
    NoComma,
}
/// A processed `data:` URL, borrowing the encoded body from the input string.
pub struct DataUrl<'a> {
    /// Media type parsed from the header (with fallbacks applied by `parse_header`).
    mime_type: mime::Mime,
    /// Whether the header ended with a `;base64` marker.
    base64: bool,
    /// Everything after the comma: the encoded body, optionally followed
    /// by `#` and a fragment identifier.
    encoded_body_plus_fragment: &'a str,
}
/// Errors returned by `DataUrl::decode`: either the body was not valid
/// forgiving-base64, or the caller-provided writer failed.
pub enum DecodeError<E> {
    /// The `;base64` body did not conform to forgiving-base64.
    InvalidBase64(InvalidBase64),
    /// Error propagated from the `write_body_bytes` callback.
    WriteError(E),
}
/// Opaque error: the body was not valid forgiving-base64.
/// `Debug` is derived so the type is usable with `Result::unwrap`/`expect`.
#[derive(Debug)]
pub struct InvalidBase64(());
impl<E> From<InvalidBase64> for DecodeError<E> {
fn from(e: InvalidBase64) -> Self { DecodeError::InvalidBase64(e) }
}
impl<'a> DataUrl<'a> {
    /// <https://fetch.spec.whatwg.org/#data-url-processor>
    /// but starting from a string rather than a Url, to avoid extra string copies.
    pub fn process(input: &'a str) -> Result<Self, DataUrlError> {
        use DataUrlError::*;
        let after_colon = pretend_parse_data_url(input).ok_or(NotADataUrl)?;
        let (from_colon_to_comma, encoded_body_plus_fragment) =
            find_comma_before_fragment(after_colon).ok_or(NoComma)?;
        let (mime_type, base64) = parse_header(from_colon_to_comma);
        Ok(DataUrl { mime_type, base64, encoded_body_plus_fragment })
    }
    /// The URL’s parsed media type.
    pub fn mime_type(&self) -> &mime::Mime {
        &self.mime_type
    }
    /// Streaming-decode the data URL’s body to `write_body_bytes`,
    /// and return the URL’s fragment identifier, if it has one.
    ///
    /// The fragment is represented as in the original input.
    /// It needs to be either percent-encoded to obtain the same string as in a parsed URL,
    /// or percent-decoded to interpret it as text.
    pub fn decode<F, E>(&self, write_body_bytes: F) -> Result<Option<&'a str>, DecodeError<E>>
        where F: FnMut(&[u8]) -> Result<(), E>
    {
        if self.base64 {
            decode_with_base64(self.encoded_body_plus_fragment, write_body_bytes)
        } else {
            // A non-base64 body cannot itself be invalid; only the caller's
            // writer can fail.
            decode_without_base64(self.encoded_body_plus_fragment, write_body_bytes)
                .map_err(DecodeError::WriteError)
        }
    }
    /// Convenience wrapper around `decode()` that collects the body into a `Vec<u8>`.
    pub fn decode_to_vec(&self) -> Result<(Vec<u8>, Option<&str>), InvalidBase64> {
        // Uninhabited write-error type: extending a Vec cannot fail.
        enum Impossible {}
        let mut body = Vec::new();
        let result = self.decode::<_, Impossible>(|bytes| Ok(body.extend_from_slice(bytes)));
        match result {
            Ok(url_fragment) => Ok((body, url_fragment)),
            Err(DecodeError::InvalidBase64(e)) => Err(e),
            // Statically impossible; the empty match proves it to the compiler.
            Err(DecodeError::WriteError(e)) => match e {}
        }
    }
}
// Early-`return None` from the enclosing function when `$condition` is false
// (like `?`, but for boolean conditions inside Option-returning functions).
macro_rules! require {
    ($condition: expr) => {
        if !$condition {
            return None
        }
    }
}
/// Similar to <https://url.spec.whatwg.org/#concept-basic-url-parser>
/// followed by <https://url.spec.whatwg.org/#concept-url-serializer>
///
/// * `None`: not a data URL.
///
/// * `Some(s)`: sort of the result of serialization, except:
///
///     - `data:` prefix removed
///     - The fragment is included
///     - Other components are **not** UTF-8 percent-encoded
///     - ASCII tabs and newlines in the middle are **not** removed
fn pretend_parse_data_url(input: &str) -> Option<&str> {
    // Trim C0 control or space (every C0 control and ' ' sorts <= ' ' in ASCII)
    let left_trimmed = input.trim_left_matches(|ch| ch <= ' ');
    let mut bytes = left_trimmed.bytes();
    {
        // Ignore ASCII tabs or newlines
        let mut iter = bytes.by_ref().filter(|&byte| !matches!(byte, b'\t' | b'\n' | b'\r'));
        // ASCII-case-insensitive match of the "data:" scheme prefix.
        require!(iter.next()?.to_ascii_lowercase() == b'd');
        require!(iter.next()?.to_ascii_lowercase() == b'a');
        require!(iter.next()?.to_ascii_lowercase() == b't');
        require!(iter.next()?.to_ascii_lowercase() == b'a');
        require!(iter.next()? == b':');
    }
    // `bytes` was advanced through the prefix (including any skipped
    // tabs/newlines), so the drop in remaining length is exactly the
    // number of bytes consumed.
    let bytes_consumed = left_trimmed.len() - bytes.len();
    let after_colon = &left_trimmed[bytes_consumed..];
    // Trim C0 control or space
    Some(after_colon.trim_right_matches(|ch| ch <= ' '))
}
/// Split `after_colon` at the first comma, provided no `#` occurs before it.
///
/// Returns `(header, body_plus_fragment)` with the comma itself dropped,
/// or `None` when a fragment starts (or the string ends) before any comma.
fn find_comma_before_fragment(after_colon: &str) -> Option<(&str, &str)> {
    for (position, byte) in after_colon.bytes().enumerate() {
        match byte {
            b',' => return Some((&after_colon[..position], &after_colon[position + 1..])),
            // A '#' starts the fragment: any later comma belongs to the fragment.
            b'#' => return None,
            _ => {}
        }
    }
    None
}
/// Parse everything between `data:` and the comma: an optional MIME type
/// (with parameters) and an optional trailing `;base64` marker.
/// Returns the parsed MIME type (with fallbacks applied) and whether the
/// body is base64-encoded.
fn parse_header(from_colon_to_comma: &str) -> (mime::Mime, bool) {
    let input = from_colon_to_comma.chars()
        .filter(|&c| !matches!(c, '\t' | '\n' | '\r'))  // Removed by the URL parser
        .collect::<String>();
    // Declared before `input` is reborrowed below so the String lives long enough.
    let mut string;
    let input = input.trim_matches(' ');
    let (mut input, base64) = match without_base64_suffix(input) {
        Some(s) => (s, true),
        None => (input, false),
    };
    // FIXME: percent-encode
    // A header starting with ';' has parameters but no type: default to text/plain.
    if input.starts_with(';') {
        string = String::from("text/plain");
        string.push_str(input);
        input = &*string;
    }
    // FIXME: does Mime::from_str match the MIME Sniffing Standard’s parsing algorithm?
    // <https://mimesniff.spec.whatwg.org/#parse-a-mime-type>
    let mime_type = input.parse()
        .unwrap_or_else(|_| "text/plain;charset=US-ASCII".parse().unwrap());
    (mime_type, base64)
}
/// Strip an optional `;base64` suffix (ASCII-case-insensitive, allowing
/// spaces between the `;` and `base64`).
///
/// `None` means there was no such suffix.
fn without_base64_suffix(s: &str) -> Option<&str> {
    let before_marker = remove_suffix(s, "base64", str::eq_ignore_ascii_case)?;
    let before_spaces = before_marker.trim_right_matches(' ');
    remove_suffix(before_spaces, ";", str::eq)
}
/// If `haystack` ends with `needle` (as decided by `eq`), return the part
/// before it; otherwise `None`. The `eq` parameter lets callers choose exact
/// or ASCII-case-insensitive comparison.
fn remove_suffix<'a, Eq>(haystack: &'a str, needle: &str, eq: Eq) -> Option<&'a str>
    where Eq: Fn(&str, &str) -> bool
{
    // A needle longer than the haystack can never be a suffix.
    if needle.len() > haystack.len() {
        return None;
    }
    let (prefix, suffix) = haystack.split_at(haystack.len() - needle.len());
    if eq(suffix, needle) {
        Some(prefix)
    } else {
        None
    }
}
/// This is <https://url.spec.whatwg.org/#string-percent-decode> while also:
///
/// * Ignoring ASCII tab or newlines
/// * Stopping at the first '#' (which indicates the start of the fragment)
///
/// Anything that would have been UTF-8 percent-encoded by the URL parser
/// would be percent-decoded here.
/// We skip that round-trip and pass it through unchanged.
fn decode_without_base64<F, E>(encoded_body_plus_fragment: &str, mut write_bytes: F)
    -> Result<Option<&str>, E>
    where F: FnMut(&[u8]) -> Result<(), E>
{
    let bytes = encoded_body_plus_fragment.as_bytes();
    let mut slice_start = 0;
    for (i, &byte) in bytes.iter().enumerate() {
        // We only need to look for 5 different "special" byte values.
        // For everything else we make slices as large as possible, borrowing the input,
        // in order to make fewer write_bytes() calls.
        if matches!(byte, b'%' | b'#' | b'\t' | b'\n' | b'\r') {
            // Write everything (if anything) "non-special" we’ve accumulated
            // before this special byte.
            if i > slice_start {
                write_bytes(&bytes[slice_start..i])?;
                // BUG FIX: the accumulated run has been flushed, so the next
                // slice must start here. Without this, an invalid escape such
                // as "ab%zz" re-emitted the already-written "ab".
                slice_start = i;
            }
            // Then deal with the special byte.
            match byte {
                b'%' => {
                    let h = bytes.get(i + 1).and_then(|&b| (b as char).to_digit(16));
                    let l = bytes.get(i + 2).and_then(|&b| (b as char).to_digit(16));
                    if let (Some(h), Some(l)) = (h, l) {
                        // '%' followed by two ASCII hex digits
                        let one_byte = h as u8 * 0x10 + l as u8;
                        write_bytes(&[one_byte])?;
                        slice_start = i + 3;
                    } else {
                        // Do nothing. Leave slice_start unchanged (== i):
                        // the literal '%' sign will be part of the next slice.
                    }
                }
                b'#' => {
                    let fragment_start = i + 1;
                    return Ok(Some(&encoded_body_plus_fragment[fragment_start..]))
                }
                // Skip over '\t' | '\n' | '\r'
                _ => slice_start = i + 1
            }
        }
    }
    // Flush whatever remains after the last special byte (possibly everything).
    write_bytes(&bytes[slice_start..])?;
    Ok(None)
}
/// `decode_without_base64()` composed with
/// <https://infra.spec.whatwg.org/#isomorphic-decode> composed with
/// <https://infra.spec.whatwg.org/#forgiving-base64-decode>.
fn decode_with_base64<F, E>(encoded_body_plus_fragment: &str, mut write_bytes: F)
    -> Result<Option<&str>, DecodeError<E>>
    where F: FnMut(&[u8]) -> Result<(), E>
{
    // Up to four 6-bit alphabet values (24 bits) accumulate here before
    // three decoded bytes are emitted.
    let mut bit_buffer: u32 = 0;
    let mut buffer_bit_length: u8 = 0;
    // Number of '=' padding characters seen so far.
    let mut padding_symbols: u8 = 0;
    let fragment = decode_without_base64::<_, DecodeError<E>>(encoded_body_plus_fragment, |bytes| {
        for &byte in bytes.iter() {
            let value = BASE64_DECODE_TABLE[byte as usize];
            if value < 0 {
                // A character that’s not part of the alphabet
                // Remove ASCII whitespace
                // '\t' | '\n' | '\r' was already filtered by decode_without_base64()
                if byte == b' ' || byte == b'\x0C' {
                    continue
                }
                if byte == b'=' {
                    // BUG FIX: count each '=' once (was `saturating_add(8)`,
                    // which could never match the (…, 1) / (…, 2) arms checked
                    // below, so every padded input was rejected).
                    padding_symbols = padding_symbols.saturating_add(1);
                    continue
                }
                Err(InvalidBase64(()))?
            }
            if padding_symbols > 0 {
                // Alphabet symbols after padding
                Err(InvalidBase64(()))?
            }
            bit_buffer <<= 6;
            bit_buffer |= value as u32;
            // BUG FIX: 18 bits before this symbol means we just shifted in the
            // fourth symbol of a group, so flush now. (The previous `< 24`
            // flushed one symbol too late: four plain symbols left the length
            // at 24 and fell into the rejecting `_` arm below, and longer
            // inputs dropped the first symbol of the stream.)
            if buffer_bit_length < 18 {
                buffer_bit_length += 6;
            } else {
                // We’ve accumulated four times 6 bits, which equals three times 8 bits.
                let byte_buffer = [
                    (bit_buffer >> 16) as u8,
                    (bit_buffer >> 8) as u8,
                    bit_buffer as u8,
                ];
                write_bytes(&byte_buffer).map_err(DecodeError::WriteError)?;
                buffer_bit_length = 0;
                // No need to reset bit_buffer,
                // since next time we’re only gonna read relevant bits.
            }
        }
        Ok(())
    })?;
    match (buffer_bit_length, padding_symbols) {
        (0, 0) => {
            // A multiple of four of alphabet symbols, and nothing else.
        }
        (12, 2) | (12, 0) => {
            // A multiple of four of alphabet symbols, followed by two more symbols,
            // optionally followed by two padding characters (which make a total multiple of four).
            let byte_buffer = [
                (bit_buffer >> 4) as u8,
            ];
            write_bytes(&byte_buffer).map_err(DecodeError::WriteError)?;
        }
        (18, 1) | (18, 0) => {
            // A multiple of four of alphabet symbols, followed by three more symbols,
            // optionally followed by one padding character (which make a total multiple of four).
            let byte_buffer = [
                (bit_buffer >> 10) as u8,
                (bit_buffer >> 2) as u8,
            ];
            write_bytes(&byte_buffer).map_err(DecodeError::WriteError)?;
        }
        _ => {
            // No other combination is acceptable
            Err(InvalidBase64(()))?
        }
    }
    Ok(fragment)
}
/// Generated by `make_base64_decode_table.py` based on "Table 1: The Base 64 Alphabet"
/// at <https://tools.ietf.org/html/rfc4648#section-4>
///
/// Array indices are the byte value of symbols.
/// Array values are their positions in the base64 alphabet,
/// or -1 for symbols not in the alphabet.
/// The position contributes 6 bits to the decoded bytes.
const BASE64_DECODE_TABLE: [i8; 256] = [
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // 0x00-0x0F: C0 controls
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // 0x10-0x1F: C0 controls
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 62, -1, -1, -1, 63, // 0x20-0x2F: '+' = 62, '/' = 63
    52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1, -1, -1, -1, -1, -1, // 0x30-0x3F: '0'..'9' = 52..61
    -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, // 0x40-0x4F: 'A'..'O' = 0..14
    15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -1, -1, -1, -1, -1, // 0x50-0x5F: 'P'..'Z' = 15..25
    -1, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, // 0x60-0x6F: 'a'..'o' = 26..40
    41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -1, -1, -1, -1, -1, // 0x70-0x7F: 'p'..'z' = 41..51
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // 0x80-0xFF:
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // non-ASCII bytes are never
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // alphabet symbols
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
];
|
pub mod cards;
Add extern rand crate to crate root.
extern crate rand;
pub mod cards; |
// Copyright 2014-2016 bluss and ndarray developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_name="ndarray"]
#![doc(html_root_url = "http://bluss.github.io/rust-ndarray/master/")]
//! The `ndarray` crate provides an N-dimensional container for general elements
//! and for numerics.
//!
//! - [**`ArrayBase`**](struct.ArrayBase.html):
//! The N-dimensional array type itself.<br>
//! It is used to implement both the owned arrays and the views; see its docs
//! for an overview of all array features.
//! - The main specific array type is [**`Array`**](type.Array.html), which owns
//! its elements.
//!
//! ## Highlights
//!
//! - Generic N-dimensional array
//! - Slicing, also with arbitrary step size, and negative indices to mean
//! elements from the end of the axis.
//! - Views and subviews of arrays; iterators that yield subviews.
//! - Higher order operations and arithmetic are performant
//! - Array views can be used to slice and mutate any `[T]` data using
//! `ArrayView::from` and `ArrayViewMut::from`.
//!
//! ## Crate Status
//!
//! - Still iterating on and evolving the crate
//! + The crate is continuously developing, and breaking changes are expected
//! during evolution from version to version. We adopt the newest stable
//! rust features if we need them.
//! - Performance:
//! + Prefer higher order methods and arithmetic operations on arrays first,
//! then iteration, and as a last priority using indexed algorithms.
//! + The higher order functions like ``.map()``, ``.map_inplace()`` and
//! ``.zip_mut_with()`` are the most efficient ways to
//! perform single traversal and lock step traversal respectively.
//! + Performance of an operation depends on the memory layout of the array
//! or array view. Especially if it's a binary operation, which
//! needs matching memory layout to be efficient (with some exceptions).
//! + Efficient floating point matrix multiplication even for very large
//! matrices; can optionally use BLAS to improve it further.
//!
//! ## Crate Feature Flags
//!
//! The following crate feature flags are available. They are configured in your
//! `Cargo.toml`.
//!
//! - `rustc-serialize`
//! - Optional, compatible with Rust stable
//! - Enables serialization support for rustc-serialize 0.3
//! - `serde`
//! - Optional, compatible with Rust stable
//! - Enables serialization support for serde 0.8
//! - `blas`
//! - Optional and experimental, compatible with Rust stable
//! - Enable transparent BLAS support for matrix multiplication. Pluggable
//! backend via `blas-sys`.
//!
#[cfg(feature = "serde")]
extern crate serde;
#[cfg(feature = "rustc-serialize")]
extern crate rustc_serialize as serialize;
#[cfg(feature="blas")]
extern crate blas_sys;
extern crate matrixmultiply;
extern crate itertools;
extern crate num_traits as libnum;
extern crate num_complex;
use std::iter::Zip;
use std::marker::PhantomData;
use std::rc::Rc;
use std::slice::{self, Iter as SliceIter, IterMut as SliceIterMut};
pub use dimension::{
Dimension,
IntoDimension,
RemoveAxis,
Axis,
};
pub use dimension::dim::*;
pub use dimension::NdIndex;
pub use indexes::Indices;
pub use indexes::{indices, indices_of};
pub use error::{ShapeError, ErrorKind};
pub use si::{Si, S};
use iterators::Baseiter;
pub use iterators::{
InnerIter,
InnerIterMut,
AxisIter,
AxisIterMut,
AxisChunksIter,
AxisChunksIterMut,
};
pub use arraytraits::AsArray;
pub use linalg_traits::{LinalgScalar, NdFloat};
pub use stacking::stack;
pub use shape_builder::{ ShapeBuilder};
mod aliases;
mod arraytraits;
#[cfg(feature = "serde")]
mod array_serde;
#[cfg(feature = "rustc-serialize")]
mod array_serialize;
mod arrayformat;
mod data_traits;
pub use aliases::*;
pub use data_traits::{
Data,
DataMut,
DataOwned,
DataShared,
DataClone,
};
mod dimension;
mod free_functions;
pub use free_functions::*;
mod indexes;
mod iterators;
mod linalg_traits;
mod linspace;
mod numeric_util;
mod si;
mod error;
mod shape_builder;
mod stacking;
/// Implementation's prelude. Common types used everywhere.
mod imp_prelude {
    pub use prelude::*;
    // Crate-root items needed by implementation modules in addition to the
    // public prelude.
    pub use {
        RemoveAxis,
        Data,
        DataMut,
        DataOwned,
        DataShared,
        ViewRepr,
    };
    pub use dimension::DimensionExt;
    /// Wrapper type for private methods
    #[derive(Copy, Clone, Debug)]
    pub struct Priv<T>(pub T);
}
pub mod prelude;
/// Array index type
pub type Ix = usize;
/// Array index type (signed)
///
/// Used where an index or offset may be negative (strides are documented
/// on `ArrayBase` as being reinterpreted as `isize`).
pub type Ixs = isize;
/// An *N*-dimensional array.
///
/// The array is a general container of elements. It cannot grow or shrink, but
/// can be sliced into subsets of its data.
/// The array supports arithmetic operations by applying them elementwise.
///
/// The `ArrayBase<S, D>` is parameterized by `S` for the data container and
/// `D` for the dimensionality.
///
/// Type aliases [`Array`], [`RcArray`], [`ArrayView`], and [`ArrayViewMut`] refer
/// to `ArrayBase` with different types for the data container.
///
/// [`Array`]: type.Array.html
/// [`RcArray`]: type.RcArray.html
/// [`ArrayView`]: type.ArrayView.html
/// [`ArrayViewMut`]: type.ArrayViewMut.html
///
/// ## Contents
///
/// + [Array](#array)
/// + [RcArray](#rcarray)
/// + [Array Views](#array-views)
/// + [Indexing and Dimension](#indexing-and-dimension)
/// + [Slicing](#slicing)
/// + [Subviews](#subviews)
/// + [Arithmetic Operations](#arithmetic-operations)
/// + [Broadcasting](#broadcasting)
/// + [Constructor Methods for Owned Arrays](#constructor-methods-for-owned-arrays)
/// + [Methods For All Array Types](#methods-for-all-array-types)
/// + [Methods Specific to Array Views](#methods-specific-to-array-views)
///
///
///
///
/// ## `Array`
///
/// [`Array`](type.Array.html) is an owned array that owns the underlying array
/// elements directly (just like a `Vec`) and it is the default way to create and
/// store n-dimensional data. `Array<A, D>` has two type parameters: `A` for
/// the element type, and `D` for the dimensionality. A particular
/// dimensionality's type alias like `Array3<A>` just has the type parameter
/// `A` for element type.
///
/// An example:
///
/// ```
/// // Create a three-dimensional f64 array, initialized with zeros
/// use ndarray::Array3;
/// let mut temperature = Array3::<f64>::zeros((3, 4, 5));
/// // Increase the temperature in this location
/// temperature[[2, 2, 2]] += 0.5;
/// ```
///
/// ## `RcArray`
///
/// [`RcArray`](type.RcArray.html) is an owned array with reference counted
/// data (shared ownership).
/// Sharing requires that it uses copy-on-write for mutable operations.
/// Calling a method for mutating elements on `RcArray`, for example
/// [`view_mut()`](#method.view_mut) or [`get_mut()`](#method.get_mut),
/// will break sharing and require a clone of the data (if it is not uniquely held).
///
/// ## Array Views
///
/// `ArrayView` and `ArrayViewMut` are read-only and read-write array views
/// respectively. They use dimensionality, indexing, and almost all other
/// methods the same way as the other array types.
///
/// A view is created from an array using `.view()`, `.view_mut()`, using
/// slicing (`.slice()`, `.slice_mut()`) or from one of the many iterators
/// that yield array views.
///
/// You can also create an array view from a regular slice of data not
/// allocated with `Array` — see [Methods Specific to Array
/// Views](#methods-specific-to-array-views).
///
/// Note that all `ArrayBase` variants can change their view (slicing) of the
/// data freely, even when their data can’t be mutated.
///
/// ## Indexing and Dimension
///
/// The dimensionality of the array determines the number of *axes*, for example
/// a 2D array has two axes. These are listed in “big endian” order, so that
/// the greatest dimension is listed first, the lowest dimension with the most
/// rapidly varying index is the last.
///
/// In a 2D array the index of each element is `[row, column]` as seen in this
/// 4 × 3 example:
///
/// ```ignore
/// [[ [0, 0], [0, 1], [0, 2] ], // row 0
/// [ [1, 0], [1, 1], [1, 2] ], // row 1
/// [ [2, 0], [2, 1], [2, 2] ], // row 2
/// [ [3, 0], [3, 1], [3, 2] ]] // row 3
/// // \ \ \
/// // column 0 \ column 2
/// // column 1
/// ```
///
/// The number of axes for an array is fixed by its `D` type parameter: `Ix1`
/// for a 1D array, `Ix2` for a 2D array etc. The dimension type `IxDyn` allows
/// a dynamic number of axes.
///
/// A fixed size array (`[usize; N]`) of the corresponding dimensionality is
/// used to index the `Array`, making the syntax `array[[i, j, ...]]` possible.
///
/// ```
/// use ndarray::Array2;
/// let mut array = Array2::zeros((4, 3));
/// array[[1, 1]] = 7;
/// ```
///
/// Important traits and types for dimension and indexing:
///
/// - A [`Dim`](Dim.t.html) value represents a dimensionality or index.
/// - Trait [`Dimension`](Dimension.t.html) is implemented by all
/// dimensionalities. It defines many operations for dimensions and indices.
/// - Trait [`IntoDimension`](IntoDimension.t.html) is used to convert into a
/// `Dim` value.
/// - Trait [`ShapeBuilder`](ShapeBuilder.t.html) is an extension of
/// `IntoDimension` and is used when constructing an array. A shape describes
/// not just the extent of each axis but also their strides.
/// - Trait [`NdIndex`](NdIndex.t.html) is an extension of `Dimension` and is
/// for values that can be used with indexing syntax.
///
///
/// The default memory order of an array is *row major* order (a.k.a “c” order),
/// where each row is contiguous in memory.
/// A *column major* (a.k.a. “f” or fortran) memory order array has
/// columns (or, in general, the outermost axis) with contiguous elements.
///
/// The logical order of any array’s elements is the row major order
/// (the rightmost index is varying the fastest).
/// The iterators `.iter(), .iter_mut()` always adhere to this order, for example.
///
/// ## Slicing
///
/// You can use slicing to create a view of a subset of the data in
/// the array. Slicing methods include `.slice()`, `.islice()`,
/// `.slice_mut()`.
///
/// The slicing argument can be passed using the macro [`s![]`](macro.s!.html),
/// which will be used in all examples. (The explicit form is a reference
/// to a fixed size array of [`Si`]; see its docs for more information.)
/// [`Si`]: struct.Si.html
///
/// ```
/// // import the s![] macro
/// #[macro_use(s)]
/// extern crate ndarray;
///
/// use ndarray::arr3;
///
/// fn main() {
///
/// // 2 submatrices of 2 rows with 3 elements per row, means a shape of `[2, 2, 3]`.
///
/// let a = arr3(&[[[ 1, 2, 3], // -- 2 rows \_
/// [ 4, 5, 6]], // -- /
/// [[ 7, 8, 9], // \_ 2 submatrices
/// [10, 11, 12]]]); // /
/// // 3 columns ..../.../.../
///
/// assert_eq!(a.shape(), &[2, 2, 3]);
///
/// // Let’s create a slice with
/// //
/// // - Both of the submatrices of the greatest dimension: `..`
/// // - Only the first row in each submatrix: `0..1`
/// // - Every element in each row: `..`
///
/// let b = a.slice(s![.., 0..1, ..]);
/// // without the macro, the explicit argument is `&[S, Si(0, Some(1), 1), S]`
///
/// let c = arr3(&[[[ 1, 2, 3]],
/// [[ 7, 8, 9]]]);
/// assert_eq!(b, c);
/// assert_eq!(b.shape(), &[2, 1, 3]);
///
/// // Let’s create a slice with
/// //
/// // - Both submatrices of the greatest dimension: `..`
/// // - The last row in each submatrix: `-1..`
/// // - Row elements in reverse order: `..;-1`
/// let d = a.slice(s![.., -1.., ..;-1]);
/// let e = arr3(&[[[ 6, 5, 4]],
/// [[12, 11, 10]]]);
/// assert_eq!(d, e);
/// }
/// ```
///
/// ## Subviews
///
/// Subview methods allow you to restrict the array view while removing
/// one axis from the array. Subview methods include `.subview()`,
/// `.isubview()`, `.subview_mut()`.
///
/// Subview takes two arguments: `axis` and `index`.
///
/// ```
/// use ndarray::{arr3, aview2, Axis};
///
/// // 2 submatrices of 2 rows with 3 elements per row, means a shape of `[2, 2, 3]`.
///
/// let a = arr3(&[[[ 1, 2, 3], // \ axis 0, submatrix 0
/// [ 4, 5, 6]], // /
/// [[ 7, 8, 9], // \ axis 0, submatrix 1
/// [10, 11, 12]]]); // /
/// // \
/// // axis 2, column 0
///
/// assert_eq!(a.shape(), &[2, 2, 3]);
///
/// // Let’s take a subview along the greatest dimension (axis 0),
/// // taking submatrix 0, then submatrix 1
///
/// let sub_0 = a.subview(Axis(0), 0);
/// let sub_1 = a.subview(Axis(0), 1);
///
/// assert_eq!(sub_0, aview2(&[[ 1, 2, 3],
/// [ 4, 5, 6]]));
/// assert_eq!(sub_1, aview2(&[[ 7, 8, 9],
/// [10, 11, 12]]));
/// assert_eq!(sub_0.shape(), &[2, 3]);
///
/// // This is the subview picking only axis 2, column 0
/// let sub_col = a.subview(Axis(2), 0);
///
/// assert_eq!(sub_col, aview2(&[[ 1, 4],
/// [ 7, 10]]));
/// ```
///
/// `.isubview()` modifies the view in the same way as `subview()`, but
/// since it is *in place*, it cannot remove the collapsed axis. It becomes
/// an axis of length 1.
///
/// `.outer_iter()` is an iterator of every subview along the zeroth (outer)
/// axis, while `.axis_iter()` is an iterator of every subview along a
/// specific axis.
///
/// ## Arithmetic Operations
///
/// Arrays support all arithmetic operations the same way: they apply elementwise.
///
/// Since the trait implementations are hard to overview, here is a summary.
///
/// Let `A` be an array or view of any kind. Let `B` be an array
/// with owned storage (either `Array` or `RcArray`).
/// Let `C` be an array with mutable data (either `Array`, `RcArray`
/// or `ArrayViewMut`).
/// The following combinations of operands
/// are supported for an arbitrary binary operator denoted by `@` (it can be
/// `+`, `-`, `*`, `/` and so on).
///
/// - `&A @ &A` which produces a new `Array`
/// - `B @ A` which consumes `B`, updates it with the result, and returns it
/// - `B @ &A` which consumes `B`, updates it with the result, and returns it
/// - `C @= &A` which performs an arithmetic operation in place
///
/// The trait [`ScalarOperand`](trait.ScalarOperand.html) marks types that can be used in arithmetic
/// with arrays directly. For a scalar `K` the following combinations of operands
/// are supported (scalar can be on either the left or right side, but
/// `ScalarOperand` docs have the detailed conditions).
///
/// - `&A @ K` or `K @ &A` which produces a new `Array`
/// - `B @ K` or `K @ B` which consumes `B`, updates it with the result and returns it
/// - `C @= K` which performs an arithmetic operation in place
///
/// ## Broadcasting
///
/// Arrays support limited *broadcasting*, where arithmetic operations with
/// array operands of different sizes can be carried out by repeating the
/// elements of the smaller dimension array. See
/// [`.broadcast()`](#method.broadcast) for a more detailed
/// description.
///
/// ```
/// use ndarray::arr2;
///
/// let a = arr2(&[[1., 1.],
/// [1., 2.]]);
/// let b = arr2(&[[0., 1.]]);
///
/// let c = arr2(&[[1., 2.],
/// [1., 3.]]);
/// // We can add because the shapes are compatible even if not equal.
/// assert!(
/// c == a + b
/// );
/// ```
///
pub struct ArrayBase<S, D>
    where S: Data
{
    /// Rc data when used as view, Uniquely held data when being mutated
    data: S,
    /// A pointer into the buffer held by data, may point anywhere
    /// in its range.
    // Invariant: every index reachable through `dim`/`strides` must stay
    // inside `data`'s buffer — upheld by the constructors (not in this chunk).
    ptr: *mut S::Elem,
    /// The size of each axis
    dim: D,
    /// The element count stride per axis. To be parsed as `isize`.
    strides: D,
}
/// An array where the data has shared ownership and is copy on write.
/// It can act as both an owner of the data as well as a shared reference (view
/// like).
pub type RcArray<A, D> = ArrayBase<Rc<Vec<A>>, D>;
/// An array that owns its data uniquely.
///
/// `Array` is the main n-dimensional array type, and it owns all its array
/// elements.
///
/// [**`ArrayBase`**](struct.ArrayBase.html) is used to implement both the owned
/// arrays and the views; see its docs for an overview of all array features.
///
/// See also:
///
/// + [Constructor Methods for Owned Arrays](struct.ArrayBase.html#constructor-methods-for-owned-arrays)
/// + [Methods For All Array Types](struct.ArrayBase.html#methods-for-all-array-types)
/// + Dimensionality-specific type aliases
/// [`Array1`](Array1.t.html),
/// [`Array2`](Array2.t.html),
/// [`Array3`](Array3.t.html) and so on.
pub type Array<A, D> = ArrayBase<Vec<A>, D>;
#[deprecated(note="Use the type alias `Array` instead")]
/// Array where the data is owned uniquely.
pub type OwnedArray<A, D> = ArrayBase<Vec<A>, D>;
/// A lightweight array view.
///
/// An array view represents an array or a part of it, created from
/// an iterator, subview or slice of an array. Element access yields `&A`.
///
/// Array views have all the methods of an array (see [`ArrayBase`][ab]).
///
/// See also [**Methods Specific To Array Views**](struct.ArrayBase.html#methods-specific-to-array-views)
///
/// [ab]: struct.ArrayBase.html
pub type ArrayView<'a, A, D> = ArrayBase<ViewRepr<&'a A>, D>;
/// A lightweight read-write array view.
///
/// An array view represents an array or a part of it, created from
/// an iterator, subview or slice of an array. Element access yields `&mut A`.
///
/// Array views have all the methods of an array (see [`ArrayBase`][ab]).
///
/// See also [**Methods Specific To Array Views**](struct.ArrayBase.html#methods-specific-to-array-views)
///
/// [ab]: struct.ArrayBase.html
pub type ArrayViewMut<'a, A, D> = ArrayBase<ViewRepr<&'a mut A>, D>;
/// Array view’s representation.
#[derive(Copy, Clone)]
// This is just a marker type, to carry the lifetime parameter.
pub struct ViewRepr<A> {
    /// `A` is a reference type (`&'a A` or `&'a mut A` in the view aliases),
    /// so the lifetime rides along here without storing any data.
    life: PhantomData<A>,
}
impl<A> ViewRepr<A> {
#[inline(always)]
fn new() -> Self {
ViewRepr { life: PhantomData }
}
}
mod impl_clone;
mod impl_constructors;
mod impl_methods;
mod impl_owned_array;
/// Private Methods
impl<A, S, D> ArrayBase<S, D>
    where S: Data<Elem=A>, D: Dimension
{
    /// Like `broadcast()`, but panics with a descriptive message when the
    /// shapes are incompatible instead of returning `None`.
    #[inline]
    fn broadcast_unwrap<E>(&self, dim: E) -> ArrayView<A, E>
        where E: Dimension,
    {
        // Kept out of line (#[cold] + #[inline(never)]) so the happy path
        // stays small.
        #[cold]
        #[inline(never)]
        fn broadcast_panic<D, E>(from: &D, to: &E) -> !
            where D: Dimension,
                  E: Dimension,
        {
            panic!("ndarray: could not broadcast array from shape: {:?} to: {:?}",
                   from.slice(), to.slice())
        }
        match self.broadcast(dim.clone()) {
            Some(it) => it,
            None => broadcast_panic(&self.dim, &dim),
        }
    }
    /// Apply closure `f` to each element in the array, in whatever
    /// order is the fastest to visit.
    fn unordered_foreach_mut<F>(&mut self, mut f: F)
        where S: DataMut,
              F: FnMut(&mut A)
    {
        // Fast path: the data is contiguous in some memory order, so visit it
        // as a flat slice.
        if let Some(slc) = self.as_slice_memory_order_mut() {
            // FIXME: Use a for loop once slice-iterator perf is restored
            for i in 0..slc.len() {
                f(&mut slc[i]);
            }
            return;
        }
        // Fallback: visit row by row through the inner-dimension iterator.
        for row in self.inner_iter_mut() {
            row.into_iter_().fold((), |(), elt| f(elt));
        }
    }
}
mod impl_2d;
mod numeric;
pub mod linalg;
mod impl_ops;
pub use impl_ops::ScalarOperand;
// Array view methods
mod impl_views;
/// Private array view methods
impl<'a, A, D> ArrayBase<ViewRepr<&'a A>, D>
    where D: Dimension,
{
    /// Create a new `ArrayView`
    ///
    /// Unsafe because: `ptr` must be valid for the given dimension and strides.
    #[inline(always)]
    unsafe fn new_(ptr: *const A, dim: D, strides: D) -> Self {
        ArrayView {
            data: ViewRepr::new(),
            // Stored as *mut to share the field with mutable views; read-only
            // access is enforced by this type's API.
            ptr: ptr as *mut A,
            dim: dim,
            strides: strides,
        }
    }
    /// Convert into the raw dimension-ordered iterator.
    #[inline]
    fn into_base_iter(self) -> Baseiter<'a, A, D> {
        unsafe {
            // The view's ptr/dim/strides are trusted to be consistent,
            // per `new_`'s contract.
            Baseiter::new(self.ptr, self.dim.clone(), self.strides.clone())
        }
    }
    #[inline]
    fn into_elements_base(self) -> ElementsBase<'a, A, D> {
        ElementsBase { inner: self.into_base_iter() }
    }
    /// Convert into the public element iterator, preferring the contiguous
    /// slice fast path when the view's layout allows it.
    fn into_iter_(self) -> Iter<'a, A, D> {
        Iter {
            inner: if let Some(slc) = self.into_slice() {
                ElementsRepr::Slice(slc.iter())
            } else {
                ElementsRepr::Counted(self.into_elements_base())
            },
        }
    }
    /// Return an outer iterator for this view.
    #[doc(hidden)] // not official
    #[deprecated(note="This method will be replaced.")]
    pub fn into_outer_iter(self) -> AxisIter<'a, A, D::Smaller>
        where D: RemoveAxis,
    {
        iterators::new_outer_iter(self)
    }
}
impl<'a, A, D> ArrayBase<ViewRepr<&'a mut A>, D>
    where D: Dimension,
{
    /// Create a new `ArrayView`
    ///
    /// Unsafe because: `ptr` must be valid for the given dimension and strides.
    #[inline(always)]
    unsafe fn new_(ptr: *mut A, dim: D, strides: D) -> Self {
        ArrayViewMut {
            data: ViewRepr::new(),
            ptr: ptr,
            dim: dim,
            strides: strides,
        }
    }
    /// Convert into the raw dimension-ordered iterator.
    #[inline]
    fn into_base_iter(self) -> Baseiter<'a, A, D> {
        unsafe {
            // The view's ptr/dim/strides are trusted to be consistent,
            // per `new_`'s contract.
            Baseiter::new(self.ptr, self.dim.clone(), self.strides.clone())
        }
    }
    #[inline]
    fn into_elements_base(self) -> ElementsBaseMut<'a, A, D> {
        ElementsBaseMut { inner: self.into_base_iter() }
    }
    /// Convert into the public mutable element iterator, preferring the
    /// contiguous slice fast path when the layout is standard.
    fn into_iter_(self) -> IterMut<'a, A, D> {
        IterMut {
            inner:
                if self.is_standard_layout() {
                    // Standard layout means the `len()` elements are contiguous
                    // starting at `ptr`, so a mutable slice over them is valid.
                    let slc = unsafe {
                        slice::from_raw_parts_mut(self.ptr, self.len())
                    };
                    ElementsRepr::Slice(slc.iter_mut())
                } else {
                    ElementsRepr::Counted(self.into_elements_base())
                }
        }
    }
    /// Return an outer iterator for this view.
    #[doc(hidden)] // not official
    #[deprecated(note="This method will be replaced.")]
    pub fn into_outer_iter(self) -> AxisIterMut<'a, A, D::Smaller>
        where D: RemoveAxis,
    {
        iterators::new_outer_iter_mut(self)
    }
}
/// An iterator over the elements of an array.
///
/// Iterator element type is `&'a A`.
///
/// See [`.iter()`](struct.ArrayBase.html#method.iter) for more information.
pub struct Iter<'a, A: 'a, D> {
    /// Either a plain slice iterator (contiguous data) or the counted,
    /// stride-aware fallback.
    inner: ElementsRepr<SliceIter<'a, A>, ElementsBase<'a, A, D>>,
}
/// Counted read only iterator
struct ElementsBase<'a, A: 'a, D> {
    inner: Baseiter<'a, A, D>,
}
/// An iterator over the elements of an array (mutable).
///
/// Iterator element type is `&'a mut A`.
///
/// See [`.iter_mut()`](struct.ArrayBase.html#method.iter_mut) for more information.
pub struct IterMut<'a, A: 'a, D> {
    /// Either a plain slice iterator (standard layout) or the counted,
    /// stride-aware fallback.
    inner: ElementsRepr<SliceIterMut<'a, A>, ElementsBaseMut<'a, A, D>>,
}
/// An iterator over the elements of an array (mutable, counted).
///
/// Iterator element type is `&'a mut A`.
struct ElementsBaseMut<'a, A: 'a, D> {
    inner: Baseiter<'a, A, D>,
}
/// An iterator over the indexes and elements of an array.
///
/// See [`.indexed_iter()`](struct.ArrayBase.html#method.indexed_iter) for more information.
#[derive(Clone)]
pub struct IndexedIter<'a, A: 'a, D>(ElementsBase<'a, A, D>);
/// An iterator over the indexes and elements of an array (mutable).
///
/// See [`.indexed_iter_mut()`](struct.ArrayBase.html#method.indexed_iter_mut) for more information.
pub struct IndexedIterMut<'a, A: 'a, D>(ElementsBaseMut<'a, A, D>);
/// Zip two slices into one lock-step iterator of `(&A, &B)` pairs;
/// stops at the end of the shorter slice.
fn zipsl<'a, 'b, A, B>(t: &'a [A], u: &'b [B])
    -> Zip<SliceIter<'a, A>, SliceIter<'b, B>>
{
    t.iter().zip(u.iter())
}
/// Zip two mutable slices into one lock-step iterator of
/// `(&mut A, &mut B)` pairs; stops at the end of the shorter slice.
fn zipsl_mut<'a, 'b, A, B>(t: &'a mut [A], u: &'b mut [B])
    -> Zip<SliceIterMut<'a, A>, SliceIterMut<'b, B>>
{
    t.iter_mut().zip(u.iter_mut())
}
use itertools::{cons_tuples, ConsTuples};
/// Extension trait adding `zip_cons`, a zip that flattens nested tuples
/// via `itertools::cons_tuples` when chained.
trait ZipExt : Iterator {
    /// Like `Iterator::zip`, but chained calls yield `(a, b, c)` rather
    /// than nested `((a, b), c)` tuples.
    fn zip_cons<J>(self, iter: J) -> ConsTuples<Zip<Self, J::IntoIter>, (Self::Item, J::Item)>
        where J: IntoIterator,
              Self: Sized,
    {
        cons_tuples(self.zip(iter))
    }
}
// Blanket impl: every iterator gets `zip_cons`.
impl<I> ZipExt for I where I: Iterator { }
/// Internal element-iterator representation: either a fast slice iterator
/// (used when the data is contiguous) or a counted, stride-aware fallback.
enum ElementsRepr<S, C> {
    Slice(S),
    Counted(C),
}
/// A contiguous array shape of n dimensions.
///
/// Either c- or f- memory ordered (*c* a.k.a *row major* is the default).
#[derive(Copy, Clone, Debug)]
pub struct Shape<D> {
    /// Extent of each axis.
    dim: D,
    /// Whether the memory order is c (row major) rather than f (column major).
    is_c: bool,
}
/// An array shape of n dimensions in c-order, f-order or custom strides.
#[derive(Copy, Clone, Debug)]
pub struct StrideShape<D> {
    /// Extent of each axis.
    dim: D,
    /// Stride of each axis (meaningful when `custom` is set).
    strides: D,
    /// True when `strides` are user-supplied rather than derived from c/f order.
    custom: bool,
}
Clarify broadcasting docs
// Copyright 2014-2016 bluss and ndarray developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_name="ndarray"]
#![doc(html_root_url = "http://bluss.github.io/rust-ndarray/master/")]
//! The `ndarray` crate provides an N-dimensional container for general elements
//! and for numerics.
//!
//! - [**`ArrayBase`**](struct.ArrayBase.html):
//! The N-dimensional array type itself.<br>
//! It is used to implement both the owned arrays and the views; see its docs
//! for an overview of all array features.
//! - The main specific array type is [**`Array`**](type.Array.html), which owns
//! its elements.
//!
//! ## Highlights
//!
//! - Generic N-dimensional array
//! - Slicing, also with arbitrary step size, and negative indices to mean
//! elements from the end of the axis.
//! - Views and subviews of arrays; iterators that yield subviews.
//! - Higher order operations and arithmetic are performant
//! - Array views can be used to slice and mutate any `[T]` data using
//! `ArrayView::from` and `ArrayViewMut::from`.
//!
//! ## Crate Status
//!
//! - Still iterating on and evolving the crate
//! + The crate is continuously developing, and breaking changes are expected
//! during evolution from version to version. We adopt the newest stable
//! rust features if we need them.
//! - Performance:
//! + Prefer higher order methods and arithmetic operations on arrays first,
//! then iteration, and as a last priority using indexed algorithms.
//! + The higher order functions like ``.map()``, ``.map_inplace()`` and
//! ``.zip_mut_with()`` are the most efficient ways to
//! perform single traversal and lock step traversal respectively.
//! + Performance of an operation depends on the memory layout of the array
//! or array view. Especially if it's a binary operation, which
//! needs matching memory layout to be efficient (with some exceptions).
//! + Efficient floating point matrix multiplication even for very large
//! matrices; can optionally use BLAS to improve it further.
//!
//! ## Crate Feature Flags
//!
//! The following crate feature flags are available. They are configured in your
//! `Cargo.toml`.
//!
//! - `rustc-serialize`
//! - Optional, compatible with Rust stable
//! - Enables serialization support for rustc-serialize 0.3
//! - `serde`
//! - Optional, compatible with Rust stable
//! - Enables serialization support for serde 0.8
//! - `blas`
//! - Optional and experimental, compatible with Rust stable
//! - Enable transparent BLAS support for matrix multiplication. Pluggable
//! backend via `blas-sys`.
//!
#[cfg(feature = "serde")]
extern crate serde;
#[cfg(feature = "rustc-serialize")]
extern crate rustc_serialize as serialize;
#[cfg(feature="blas")]
extern crate blas_sys;
extern crate matrixmultiply;
extern crate itertools;
extern crate num_traits as libnum;
extern crate num_complex;
use std::iter::Zip;
use std::marker::PhantomData;
use std::rc::Rc;
use std::slice::{self, Iter as SliceIter, IterMut as SliceIterMut};
pub use dimension::{
Dimension,
IntoDimension,
RemoveAxis,
Axis,
};
pub use dimension::dim::*;
pub use dimension::NdIndex;
pub use indexes::Indices;
pub use indexes::{indices, indices_of};
pub use error::{ShapeError, ErrorKind};
pub use si::{Si, S};
use iterators::Baseiter;
pub use iterators::{
InnerIter,
InnerIterMut,
AxisIter,
AxisIterMut,
AxisChunksIter,
AxisChunksIterMut,
};
pub use arraytraits::AsArray;
pub use linalg_traits::{LinalgScalar, NdFloat};
pub use stacking::stack;
pub use shape_builder::{ ShapeBuilder};
mod aliases;
mod arraytraits;
#[cfg(feature = "serde")]
mod array_serde;
#[cfg(feature = "rustc-serialize")]
mod array_serialize;
mod arrayformat;
mod data_traits;
pub use aliases::*;
pub use data_traits::{
Data,
DataMut,
DataOwned,
DataShared,
DataClone,
};
mod dimension;
mod free_functions;
pub use free_functions::*;
mod indexes;
mod iterators;
mod linalg_traits;
mod linspace;
mod numeric_util;
mod si;
mod error;
mod shape_builder;
mod stacking;
/// Implementation's prelude. Common types used everywhere.
mod imp_prelude {
    pub use prelude::*;
    pub use {
        RemoveAxis,
        Data,
        DataMut,
        DataOwned,
        DataShared,
        ViewRepr,
    };
    pub use dimension::DimensionExt;

    /// Wrapper type for private methods
    // NOTE(review): appears to be a newtype used to keep helper methods out
    // of the public API surface — confirm against the impl sites.
    #[derive(Copy, Clone, Debug)]
    pub struct Priv<T>(pub T);
}
pub mod prelude;
/// Array index type (unsigned), an alias for `usize`.
pub type Ix = usize;
/// Array index type (signed), an alias for `isize`.
pub type Ixs = isize;
/// An *N*-dimensional array.
///
/// The array is a general container of elements. It cannot grow or shrink, but
/// can be sliced into subsets of its data.
/// The array supports arithmetic operations by applying them elementwise.
///
/// The `ArrayBase<S, D>` is parameterized by `S` for the data container and
/// `D` for the dimensionality.
///
/// Type aliases [`Array`], [`RcArray`], [`ArrayView`], and [`ArrayViewMut`] refer
/// to `ArrayBase` with different types for the data container.
///
/// [`Array`]: type.Array.html
/// [`RcArray`]: type.RcArray.html
/// [`ArrayView`]: type.ArrayView.html
/// [`ArrayViewMut`]: type.ArrayViewMut.html
///
/// ## Contents
///
/// + [Array](#array)
/// + [RcArray](#rcarray)
/// + [Array Views](#array-views)
/// + [Indexing and Dimension](#indexing-and-dimension)
/// + [Slicing](#slicing)
/// + [Subviews](#subviews)
/// + [Arithmetic Operations](#arithmetic-operations)
/// + [Broadcasting](#broadcasting)
/// + [Constructor Methods for Owned Arrays](#constructor-methods-for-owned-arrays)
/// + [Methods For All Array Types](#methods-for-all-array-types)
/// + [Methods Specific to Array Views](#methods-specific-to-array-views)
///
///
///
///
/// ## `Array`
///
/// [`Array`](type.Array.html) is an owned array that owns the underlying array
/// elements directly (just like a `Vec`) and it is the default way to create and
/// store n-dimensional data. `Array<A, D>` has two type parameters: `A` for
/// the element type, and `D` for the dimensionality. A particular
/// dimensionality's type alias like `Array3<A>` just has the type parameter
/// `A` for element type.
///
/// An example:
///
/// ```
/// // Create a three-dimensional f64 array, initialized with zeros
/// use ndarray::Array3;
/// let mut temperature = Array3::<f64>::zeros((3, 4, 5));
/// // Increase the temperature in this location
/// temperature[[2, 2, 2]] += 0.5;
/// ```
///
/// ## `RcArray`
///
/// [`RcArray`](type.RcArray.html) is an owned array with reference counted
/// data (shared ownership).
/// Sharing requires that it uses copy-on-write for mutable operations.
/// Calling a method for mutating elements on `RcArray`, for example
/// [`view_mut()`](#method.view_mut) or [`get_mut()`](#method.get_mut),
/// will break sharing and require a clone of the data (if it is not uniquely held).
///
/// ## Array Views
///
/// `ArrayView` and `ArrayViewMut` are read-only and read-write array views
/// respectively. They use dimensionality, indexing, and almost all other
/// methods the same way as the other array types.
///
/// A view is created from an array using `.view()`, `.view_mut()`, using
/// slicing (`.slice()`, `.slice_mut()`) or from one of the many iterators
/// that yield array views.
///
/// You can also create an array view from a regular slice of data not
/// allocated with `Array` — see [Methods Specific to Array
/// Views](#methods-specific-to-array-views).
///
/// Note that all `ArrayBase` variants can change their view (slicing) of the
/// data freely, even when their data can’t be mutated.
///
/// ## Indexing and Dimension
///
/// The dimensionality of the array determines the number of *axes*, for example
/// a 2D array has two axes. These are listed in “big endian” order, so that
/// the greatest dimension is listed first, the lowest dimension with the most
/// rapidly varying index is the last.
///
/// In a 2D array the index of each element is `[row, column]` as seen in this
/// 4 × 3 example:
///
/// ```ignore
/// [[ [0, 0], [0, 1], [0, 2] ], // row 0
/// [ [1, 0], [1, 1], [1, 2] ], // row 1
/// [ [2, 0], [2, 1], [2, 2] ], // row 2
/// [ [3, 0], [3, 1], [3, 2] ]] // row 3
/// // \ \ \
/// // column 0 \ column 2
/// // column 1
/// ```
///
/// The number of axes for an array is fixed by its `D` type parameter: `Ix1`
/// for a 1D array, `Ix2` for a 2D array etc. The dimension type `IxDyn` allows
/// a dynamic number of axes.
///
/// A fixed size array (`[usize; N]`) of the corresponding dimensionality is
/// used to index the `Array`, making the syntax `array[[i, j, ...]]`.
///
/// ```
/// use ndarray::Array2;
/// let mut array = Array2::zeros((4, 3));
/// array[[1, 1]] = 7;
/// ```
///
/// Important traits and types for dimension and indexing:
///
/// - A [`Dim`](Dim.t.html) value represents a dimensionality or index.
/// - Trait [`Dimension`](Dimension.t.html) is implemented by all
/// dimensionalities. It defines many operations for dimensions and indices.
/// - Trait [`IntoDimension`](IntoDimension.t.html) is used to convert into a
/// `Dim` value.
/// - Trait [`ShapeBuilder`](ShapeBuilder.t.html) is an extension of
/// `IntoDimension` and is used when constructing an array. A shape describes
/// not just the extent of each axis but also their strides.
/// - Trait [`NdIndex`](NdIndex.t.html) is an extension of `Dimension` and is
/// for values that can be used with indexing syntax.
///
///
/// The default memory order of an array is *row major* order (a.k.a “c” order),
/// where each row is contiguous in memory.
/// A *column major* (a.k.a. “f” or fortran) memory order array has
/// columns (or, in general, the outermost axis) with contiguous elements.
///
/// The logical order of any array’s elements is the row major order
/// (the rightmost index is varying the fastest).
/// The iterators `.iter(), .iter_mut()` always adhere to this order, for example.
///
/// ## Slicing
///
/// You can use slicing to create a view of a subset of the data in
/// the array. Slicing methods include `.slice()`, `.islice()`,
/// `.slice_mut()`.
///
/// The slicing argument can be passed using the macro [`s![]`](macro.s!.html),
/// which will be used in all examples. (The explicit form is a reference
/// to a fixed size array of [`Si`]; see its docs for more information.)
/// [`Si`]: struct.Si.html
///
/// ```
/// // import the s![] macro
/// #[macro_use(s)]
/// extern crate ndarray;
///
/// use ndarray::arr3;
///
/// fn main() {
///
/// // 2 submatrices of 2 rows with 3 elements per row, means a shape of `[2, 2, 3]`.
///
/// let a = arr3(&[[[ 1, 2, 3], // -- 2 rows \_
/// [ 4, 5, 6]], // -- /
/// [[ 7, 8, 9], // \_ 2 submatrices
/// [10, 11, 12]]]); // /
/// // 3 columns ..../.../.../
///
/// assert_eq!(a.shape(), &[2, 2, 3]);
///
/// // Let’s create a slice with
/// //
/// // - Both of the submatrices of the greatest dimension: `..`
/// // - Only the first row in each submatrix: `0..1`
/// // - Every element in each row: `..`
///
/// let b = a.slice(s![.., 0..1, ..]);
/// // without the macro, the explicit argument is `&[S, Si(0, Some(1), 1), S]`
///
/// let c = arr3(&[[[ 1, 2, 3]],
/// [[ 7, 8, 9]]]);
/// assert_eq!(b, c);
/// assert_eq!(b.shape(), &[2, 1, 3]);
///
/// // Let’s create a slice with
/// //
/// // - Both submatrices of the greatest dimension: `..`
/// // - The last row in each submatrix: `-1..`
/// // - Row elements in reverse order: `..;-1`
/// let d = a.slice(s![.., -1.., ..;-1]);
/// let e = arr3(&[[[ 6, 5, 4]],
/// [[12, 11, 10]]]);
/// assert_eq!(d, e);
/// }
/// ```
///
/// ## Subviews
///
/// Subview methods allow you to restrict the array view while removing
/// one axis from the array. Subview methods include `.subview()`,
/// `.isubview()`, `.subview_mut()`.
///
/// Subview takes two arguments: `axis` and `index`.
///
/// ```
/// use ndarray::{arr3, aview2, Axis};
///
/// // 2 submatrices of 2 rows with 3 elements per row, means a shape of `[2, 2, 3]`.
///
/// let a = arr3(&[[[ 1, 2, 3], // \ axis 0, submatrix 0
/// [ 4, 5, 6]], // /
/// [[ 7, 8, 9], // \ axis 0, submatrix 1
/// [10, 11, 12]]]); // /
/// // \
/// // axis 2, column 0
///
/// assert_eq!(a.shape(), &[2, 2, 3]);
///
/// // Let’s take a subview along the greatest dimension (axis 0),
/// // taking submatrix 0, then submatrix 1
///
/// let sub_0 = a.subview(Axis(0), 0);
/// let sub_1 = a.subview(Axis(0), 1);
///
/// assert_eq!(sub_0, aview2(&[[ 1, 2, 3],
/// [ 4, 5, 6]]));
/// assert_eq!(sub_1, aview2(&[[ 7, 8, 9],
/// [10, 11, 12]]));
/// assert_eq!(sub_0.shape(), &[2, 3]);
///
/// // This is the subview picking only axis 2, column 0
/// let sub_col = a.subview(Axis(2), 0);
///
/// assert_eq!(sub_col, aview2(&[[ 1, 4],
/// [ 7, 10]]));
/// ```
///
/// `.isubview()` modifies the view in the same way as `subview()`, but
/// since it is *in place*, it cannot remove the collapsed axis. It becomes
/// an axis of length 1.
///
/// `.outer_iter()` is an iterator of every subview along the zeroth (outer)
/// axis, while `.axis_iter()` is an iterator of every subview along a
/// specific axis.
///
/// ## Arithmetic Operations
///
/// Arrays support all arithmetic operations the same way: they apply elementwise.
///
/// Since the trait implementations are hard to overview, here is a summary.
///
/// Let `A` be an array or view of any kind. Let `B` be an array
/// with owned storage (either `Array` or `RcArray`).
/// Let `C` be an array with mutable data (either `Array`, `RcArray`
/// or `ArrayViewMut`).
/// The following combinations of operands
/// are supported for an arbitrary binary operator denoted by `@` (it can be
/// `+`, `-`, `*`, `/` and so on).
///
/// - `&A @ &A` which produces a new `Array`
/// - `B @ A` which consumes `B`, updates it with the result, and returns it
/// - `B @ &A` which consumes `B`, updates it with the result, and returns it
/// - `C @= &A` which performs an arithmetic operation in place
///
/// The trait [`ScalarOperand`](trait.ScalarOperand.html) marks types that can be used in arithmetic
/// with arrays directly. For a scalar `K` the following combinations of operands
/// are supported (scalar can be on either the left or right side, but
/// `ScalarOperand` docs have the detailed conditions).
///
/// - `&A @ K` or `K @ &A` which produces a new `Array`
/// - `B @ K` or `K @ B` which consumes `B`, updates it with the result and returns it
/// - `C @= K` which performs an arithmetic operation in place
///
/// ## Broadcasting
///
/// Arrays support limited *broadcasting*, where arithmetic operations with
/// array operands of different sizes can be carried out by repeating the
/// elements of the smaller dimension array. See
/// [`.broadcast()`](#method.broadcast) for a more detailed
/// description.
///
/// ```
/// use ndarray::arr2;
///
/// let a = arr2(&[[1., 1.],
/// [1., 2.],
/// [0., 3.],
/// [0., 4.]]);
///
/// let b = arr2(&[[0., 1.]]);
///
/// let c = arr2(&[[1., 2.],
/// [1., 3.],
/// [0., 4.],
/// [0., 5.]]);
/// // We can add because the shapes are compatible even if not equal.
/// // The `b` array is shape 1 × 2 but acts like a 4 × 2 array.
/// assert!(
/// c == a + b
/// );
/// ```
///
pub struct ArrayBase<S, D>
    where S: Data
{
    /// Rc data when used as view, uniquely held data when being mutated
    data: S,
    /// A pointer into the buffer held by `data`; may point anywhere
    /// in its range (it marks the first element of this array's view).
    ptr: *mut S::Elem,
    /// The size of each axis
    dim: D,
    /// The element count stride per axis. To be parsed as `isize`.
    strides: D,
}
/// An array where the data has shared ownership and is copy on write.
/// It can act as both an owner of the data as well as a shared reference
/// (view-like).
pub type RcArray<A, D> = ArrayBase<Rc<Vec<A>>, D>;
/// An array that owns its data uniquely.
///
/// `Array` is the main n-dimensional array type, and it owns all its array
/// elements.
///
/// [**`ArrayBase`**](struct.ArrayBase.html) is used to implement both the owned
/// arrays and the views; see its docs for an overview of all array features.
///
/// See also:
///
/// + [Constructor Methods for Owned Arrays](struct.ArrayBase.html#constructor-methods-for-owned-arrays)
/// + [Methods For All Array Types](struct.ArrayBase.html#methods-for-all-array-types)
/// + Dimensionality-specific type aliases
/// [`Array1`](Array1.t.html),
/// [`Array2`](Array2.t.html),
/// [`Array3`](Array3.t.html) and so on.
pub type Array<A, D> = ArrayBase<Vec<A>, D>;
#[deprecated(note="Use the type alias `Array` instead")]
/// Array where the data is owned uniquely.
pub type OwnedArray<A, D> = ArrayBase<Vec<A>, D>;
/// A lightweight read-only array view.
///
/// An array view represents an array or a part of it, created from
/// an iterator, subview or slice of an array.
///
/// Array views have all the methods of an array (see [`ArrayBase`][ab]).
///
/// See also [**Methods Specific To Array Views**](struct.ArrayBase.html#methods-specific-to-array-views)
///
/// [ab]: struct.ArrayBase.html
pub type ArrayView<'a, A, D> = ArrayBase<ViewRepr<&'a A>, D>;
/// A lightweight read-write array view.
///
/// An array view represents an array or a part of it, created from
/// an iterator, subview or slice of an array.
///
/// Array views have all the methods of an array (see [`ArrayBase`][ab]).
///
/// See also [**Methods Specific To Array Views**](struct.ArrayBase.html#methods-specific-to-array-views)
///
/// [ab]: struct.ArrayBase.html
pub type ArrayViewMut<'a, A, D> = ArrayBase<ViewRepr<&'a mut A>, D>;
/// Array view’s representation.
#[derive(Copy, Clone)]
// This is just a marker type, to carry the lifetime parameter.
pub struct ViewRepr<A> {
    /// Zero-sized marker; for views `A` is `&'a A` or `&'a mut A`.
    life: PhantomData<A>,
}

impl<A> ViewRepr<A> {
    /// Create the marker value (zero-sized, free at runtime).
    #[inline(always)]
    fn new() -> Self {
        ViewRepr { life: PhantomData }
    }
}
mod impl_clone;
mod impl_constructors;
mod impl_methods;
mod impl_owned_array;
/// Private Methods
impl<A, S, D> ArrayBase<S, D>
    where S: Data<Elem=A>, D: Dimension
{
    /// Broadcast `self` to shape `dim`, panicking with a descriptive
    /// message if the shapes are not broadcast-compatible.
    #[inline]
    fn broadcast_unwrap<E>(&self, dim: E) -> ArrayView<A, E>
        where E: Dimension,
    {
        // Kept out-of-line and marked cold so the happy path stays small.
        #[cold]
        #[inline(never)]
        fn broadcast_panic<D, E>(from: &D, to: &E) -> !
            where D: Dimension,
                  E: Dimension,
        {
            panic!("ndarray: could not broadcast array from shape: {:?} to: {:?}",
                   from.slice(), to.slice())
        }

        match self.broadcast(dim.clone()) {
            Some(it) => it,
            None => broadcast_panic(&self.dim, &dim),
        }
    }
    /// Apply closure `f` to each element in the array, in whatever
    /// order is the fastest to visit.
    fn unordered_foreach_mut<F>(&mut self, mut f: F)
        where S: DataMut,
              F: FnMut(&mut A)
    {
        // Fast path: elements contiguous in memory (in any order) are
        // visited as a flat slice.
        if let Some(slc) = self.as_slice_memory_order_mut() {
            // FIXME: Use for loop when slice iterator perf is restored
            for i in 0..slc.len() {
                f(&mut slc[i]);
            }
            return;
        }
        // Fallback: visit row by row; `fold` drives each row iterator.
        for row in self.inner_iter_mut() {
            row.into_iter_().fold((), |(), elt| f(elt));
        }
    }
}
mod impl_2d;
mod numeric;
pub mod linalg;
mod impl_ops;
pub use impl_ops::ScalarOperand;
// Array view methods
mod impl_views;
/// Private array view methods
impl<'a, A, D> ArrayBase<ViewRepr<&'a A>, D>
    where D: Dimension,
{
    /// Create a new `ArrayView`
    ///
    /// Unsafe because: `ptr` must be valid for the given dimension and strides.
    #[inline(always)]
    unsafe fn new_(ptr: *const A, dim: D, strides: D) -> Self {
        ArrayView {
            data: ViewRepr::new(),
            // Stored as *mut A even for the read-only view; the `&'a A`
            // marker in ViewRepr keeps the public API read-only.
            ptr: ptr as *mut A,
            dim: dim,
            strides: strides,
        }
    }
    /// Convert the view into the low-level pointer/stride iterator.
    #[inline]
    fn into_base_iter(self) -> Baseiter<'a, A, D> {
        unsafe {
            Baseiter::new(self.ptr, self.dim.clone(), self.strides.clone())
        }
    }
    /// Convert the view into the counted (stride-aware) element iterator.
    #[inline]
    fn into_elements_base(self) -> ElementsBase<'a, A, D> {
        ElementsBase { inner: self.into_base_iter() }
    }
    /// Convert the view into an element iterator, picking the fast slice
    /// path when the view is contiguous, else the counted fallback.
    fn into_iter_(self) -> Iter<'a, A, D> {
        Iter {
            inner: if let Some(slc) = self.into_slice() {
                ElementsRepr::Slice(slc.iter())
            } else {
                ElementsRepr::Counted(self.into_elements_base())
            },
        }
    }
    /// Return an outer iterator for this view.
    #[doc(hidden)] // not official
    #[deprecated(note="This method will be replaced.")]
    pub fn into_outer_iter(self) -> AxisIter<'a, A, D::Smaller>
        where D: RemoveAxis,
    {
        iterators::new_outer_iter(self)
    }
}
/// Private mutable array view methods
impl<'a, A, D> ArrayBase<ViewRepr<&'a mut A>, D>
    where D: Dimension,
{
    /// Create a new `ArrayView`
    ///
    /// Unsafe because: `ptr` must be valid for the given dimension and strides.
    #[inline(always)]
    unsafe fn new_(ptr: *mut A, dim: D, strides: D) -> Self {
        ArrayViewMut {
            data: ViewRepr::new(),
            ptr: ptr,
            dim: dim,
            strides: strides,
        }
    }
    /// Convert the view into the low-level pointer/stride iterator.
    #[inline]
    fn into_base_iter(self) -> Baseiter<'a, A, D> {
        unsafe {
            Baseiter::new(self.ptr, self.dim.clone(), self.strides.clone())
        }
    }
    /// Convert the view into the counted (stride-aware) element iterator.
    #[inline]
    fn into_elements_base(self) -> ElementsBaseMut<'a, A, D> {
        ElementsBaseMut { inner: self.into_base_iter() }
    }
    /// Convert the view into a mutable element iterator; uses the fast
    /// slice path only for standard (c-order, contiguous) layout.
    fn into_iter_(self) -> IterMut<'a, A, D> {
        IterMut {
            inner:
                if self.is_standard_layout() {
                    // Standard layout guarantees `len()` contiguous elements
                    // starting at `ptr`, so the raw slice is valid.
                    let slc = unsafe {
                        slice::from_raw_parts_mut(self.ptr, self.len())
                    };
                    ElementsRepr::Slice(slc.iter_mut())
                } else {
                    ElementsRepr::Counted(self.into_elements_base())
                }
        }
    }
    /// Return an outer iterator for this view.
    #[doc(hidden)] // not official
    #[deprecated(note="This method will be replaced.")]
    pub fn into_outer_iter(self) -> AxisIterMut<'a, A, D::Smaller>
        where D: RemoveAxis,
    {
        iterators::new_outer_iter_mut(self)
    }
}
/// An iterator over the elements of an array.
///
/// Iterator element type is `&'a A`.
///
/// See [`.iter()`](struct.ArrayBase.html#method.iter) for more information.
pub struct Iter<'a, A: 'a, D> {
    /// Either a plain slice iterator (contiguous data) or the counted,
    /// stride-aware fallback.
    inner: ElementsRepr<SliceIter<'a, A>, ElementsBase<'a, A, D>>,
}
/// Counted read only iterator
struct ElementsBase<'a, A: 'a, D> {
    inner: Baseiter<'a, A, D>,
}
/// An iterator over the elements of an array (mutable).
///
/// Iterator element type is `&'a mut A`.
///
/// See [`.iter_mut()`](struct.ArrayBase.html#method.iter_mut) for more information.
pub struct IterMut<'a, A: 'a, D> {
    /// Either a plain slice iterator (standard layout) or the counted,
    /// stride-aware fallback.
    inner: ElementsRepr<SliceIterMut<'a, A>, ElementsBaseMut<'a, A, D>>,
}
/// An iterator over the elements of an array (mutable, counted).
///
/// Iterator element type is `&'a mut A`.
struct ElementsBaseMut<'a, A: 'a, D> {
    inner: Baseiter<'a, A, D>,
}
/// An iterator over the indexes and elements of an array.
///
/// See [`.indexed_iter()`](struct.ArrayBase.html#method.indexed_iter) for more information.
#[derive(Clone)]
pub struct IndexedIter<'a, A: 'a, D>(ElementsBase<'a, A, D>);
/// An iterator over the indexes and elements of an array (mutable).
///
/// See [`.indexed_iter_mut()`](struct.ArrayBase.html#method.indexed_iter_mut) for more information.
pub struct IndexedIterMut<'a, A: 'a, D>(ElementsBaseMut<'a, A, D>);
/// Zip two slices into one lock-step iterator of `(&A, &B)` pairs;
/// stops at the end of the shorter slice.
fn zipsl<'a, 'b, A, B>(t: &'a [A], u: &'b [B])
    -> Zip<SliceIter<'a, A>, SliceIter<'b, B>>
{
    t.iter().zip(u.iter())
}
/// Zip two mutable slices into one lock-step iterator of
/// `(&mut A, &mut B)` pairs; stops at the end of the shorter slice.
fn zipsl_mut<'a, 'b, A, B>(t: &'a mut [A], u: &'b mut [B])
    -> Zip<SliceIterMut<'a, A>, SliceIterMut<'b, B>>
{
    t.iter_mut().zip(u.iter_mut())
}
use itertools::{cons_tuples, ConsTuples};
/// Extension trait adding `zip_cons`, a zip that flattens nested tuples
/// via `itertools::cons_tuples` when chained.
trait ZipExt : Iterator {
    /// Like `Iterator::zip`, but chained calls yield `(a, b, c)` rather
    /// than nested `((a, b), c)` tuples.
    fn zip_cons<J>(self, iter: J) -> ConsTuples<Zip<Self, J::IntoIter>, (Self::Item, J::Item)>
        where J: IntoIterator,
              Self: Sized,
    {
        cons_tuples(self.zip(iter))
    }
}
// Blanket impl: every iterator gets `zip_cons`.
impl<I> ZipExt for I where I: Iterator { }
/// Internal element-iterator representation: either a fast slice iterator
/// (used when the data is contiguous) or a counted, stride-aware fallback.
enum ElementsRepr<S, C> {
    Slice(S),
    Counted(C),
}
/// A contiguous array shape of n dimensions.
///
/// Either c- or f- memory ordered (*c* a.k.a *row major* is the default).
#[derive(Copy, Clone, Debug)]
pub struct Shape<D> {
    /// Extent of each axis.
    dim: D,
    /// Whether the memory order is c (row major) rather than f (column major).
    is_c: bool,
}
/// An array shape of n dimensions in c-order, f-order or custom strides.
#[derive(Copy, Clone, Debug)]
pub struct StrideShape<D> {
    /// Extent of each axis.
    dim: D,
    /// Stride of each axis (meaningful when `custom` is set).
    strides: D,
    /// True when `strides` are user-supplied rather than derived from c/f order.
    custom: bool,
}
|
//! Easily hash and verify passwords using bcrypt
use rand::{rngs::OsRng, RngCore};
use std::convert::AsRef;
use std::fmt;
use std::str::FromStr;
mod b64;
mod bcrypt;
mod errors;
pub use crate::bcrypt::bcrypt;
pub use crate::errors::{BcryptError, BcryptResult};
// Cost constants
/// Smallest cost factor accepted by `_hash_password`.
const MIN_COST: u32 = 4;
/// Largest cost factor accepted by `_hash_password`.
const MAX_COST: u32 = 31;
/// Default cost used when callers do not choose one explicitly.
pub const DEFAULT_COST: u32 = 12;
#[derive(Debug, PartialEq)]
/// A bcrypt hash result before concatenating
pub struct HashParts {
    /// Cost factor the hash was computed with.
    cost: u32,
    /// Base64-encoded salt.
    salt: String,
    /// Base64-encoded digest.
    hash: String,
}
/// BCrypt hash version
///
/// <https://en.wikipedia.org/wiki/Bcrypt#Versioning_history>
pub enum Version {
    /// Formats with the `2a` prefix.
    TwoA,
    /// Formats with the `2x` prefix.
    TwoX,
    /// Formats with the `2y` prefix.
    TwoY,
    /// Formats with the `2b` prefix.
    TwoB,
}
impl HashParts {
    /// Creates the bcrypt hash string from all its parts, using version `2b`.
    fn format(self) -> String {
        self.format_for_version(Version::TwoB)
    }
    /// Get the bcrypt hash cost
    pub fn get_cost(&self) -> u32 {
        self.cost
    }
    /// Get the bcrypt hash salt
    pub fn get_salt(&self) -> String {
        self.salt.clone()
    }
    /// Creates the bcrypt hash string from all its parts, allowing the
    /// version prefix to be customized.
    pub fn format_for_version(&self, version: Version) -> String {
        // The cost needs a width of 2, so pad with a 0 if cost < 10
        format!("${}${:02}${}{}", version, self.cost, self.salt, self.hash)
    }
}
impl FromStr for HashParts {
    type Err = BcryptError;

    /// Parses a full `$<prefix>$<cost>$<salt+hash>` bcrypt string.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        split_hash(s)
    }
}
impl ToString for HashParts {
fn to_string(&self) -> String {
self.format_for_version(Version::TwoY)
}
}
impl fmt::Display for Version {
    /// Writes the two-character bcrypt version prefix (e.g. `2b`).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let prefix = match self {
            Version::TwoA => "2a",
            Version::TwoB => "2b",
            Version::TwoX => "2x",
            Version::TwoY => "2y",
        };
        f.write_str(prefix)
    }
}
/// The main meat: actually does the hashing and does some verification with
/// the cost to ensure it's a correct one.
///
/// Returns the cost, base64 salt and base64 digest as a `HashParts`.
/// Fails with `CostNotAllowed` when `cost` is outside `[MIN_COST, MAX_COST]`
/// and with `InvalidPassword` when the password contains a NUL byte.
fn _hash_password(password: &[u8], cost: u32, salt: &[u8]) -> BcryptResult<HashParts> {
    if cost > MAX_COST || cost < MIN_COST {
        return Err(BcryptError::CostNotAllowed(cost));
    }
    // Interior NUL bytes are rejected because the password is passed to
    // bcrypt as a NUL-terminated buffer below.
    if password.contains(&0u8) {
        return Err(BcryptError::InvalidPassword);
    }
    // Output is 24
    let mut output = [0u8; 24];
    // Passwords need to be null terminated
    let mut vec: Vec<u8> = Vec::new();
    vec.extend_from_slice(password);
    vec.push(0);
    // We only consider the first 72 chars; truncate if necessary.
    // `bcrypt` below will panic if len > 72
    let truncated = if vec.len() > 72 { &vec[..72] } else { &vec };
    bcrypt::bcrypt(cost, salt, truncated, &mut output);
    Ok(HashParts {
        cost,
        salt: b64::encode(salt),
        hash: b64::encode(&output[..23]), // remember to remove the last byte
    })
}
/// Takes a full hash and splits it into its 3 parts:
/// cost, salt and hash.
fn split_hash(hash: &str) -> BcryptResult<HashParts> {
    // Expected layout: $<prefix>$<cost>$<salt+hash>
    // Should be [prefix, cost, hash]
    let raw_parts: Vec<_> = hash.split('$').filter(|s| !s.is_empty()).collect();
    if raw_parts.len() != 3 {
        return Err(BcryptError::InvalidHash(hash.to_string()));
    }
    if raw_parts[0] != "2y" && raw_parts[0] != "2b" && raw_parts[0] != "2a" {
        return Err(BcryptError::InvalidPrefix(raw_parts[0].to_string()));
    }
    let cost = match raw_parts[1].parse::<u32>() {
        Ok(c) => c,
        Err(_) => return Err(BcryptError::InvalidCost(raw_parts[1].to_string())),
    };
    if raw_parts[2].len() != 53 {
        return Err(BcryptError::InvalidHash(hash.to_string()));
    }
    // 22 base64 characters of salt followed by 31 characters of digest.
    Ok(HashParts {
        cost,
        salt: raw_parts[2][..22].to_string(),
        hash: raw_parts[2][22..].to_string(),
    })
}
/// Generates a password hash using the cost given.
/// The salt is generated randomly using the OS randomness.
///
/// Returns the complete hash string (formatted with the default version,
/// see `HashParts::format`).
pub fn hash<P: AsRef<[u8]>>(password: P, cost: u32) -> BcryptResult<String> {
    hash_with_result(password, cost).map(|r| r.format())
}
/// Generates a password hash using the cost given.
/// The salt is generated randomly using the OS randomness.
/// The function returns a result structure and allows to format the hash in different versions.
pub fn hash_with_result<P: AsRef<[u8]>>(password: P, cost: u32) -> BcryptResult<HashParts> {
    // 16 random bytes of salt from the OS CSPRNG.
    let salt = {
        let mut s = [0u8; 16];
        OsRng.fill_bytes(&mut s);
        s
    };
    _hash_password(password.as_ref(), cost, salt.as_ref())
}
/// Generates a password hash from the given password, cost and salt.
/// The function returns a result structure and allows to format the hash in different versions.
pub fn hash_with_salt<P: AsRef<[u8]>>(password: P, cost: u32, salt: &[u8]) -> BcryptResult<HashParts> {
    _hash_password(password.as_ref(), cost, salt)
}
/// Verify that a password is equivalent to the hash provided.
///
/// Re-hashes `password` with the cost and salt extracted from `hash` and
/// compares the two digests; returns `Ok(true)` on a match.
///
/// Fails if `hash` cannot be parsed or base64-decoded.
pub fn verify<P: AsRef<[u8]>>(password: P, hash: &str) -> BcryptResult<bool> {
    let parts = split_hash(hash)?;
    let salt = b64::decode(&parts.salt)?;
    let generated = _hash_password(password.as_ref(), parts.cost, &salt)?;
    let source_decoded = b64::decode(&parts.hash)?;
    let generated_decoded = b64::decode(&generated.hash)?;
    // Compare in constant time (no early exit on the first mismatching
    // byte) so the comparison does not leak how much of the digest matched
    // through timing.
    let mut diff = source_decoded.len() ^ generated_decoded.len();
    for (a, b) in source_decoded.into_iter().zip(generated_decoded) {
        diff |= (a ^ b) as usize;
    }
    Ok(diff == 0)
}
#[cfg(test)]
mod tests {
    use super::{
        _hash_password, hash, hash_with_salt, split_hash, verify, BcryptError, BcryptResult, HashParts, Version,
        DEFAULT_COST,
    };
    use quickcheck::{quickcheck, TestResult};
    use std::iter;
    use std::str::FromStr;
    #[test]
    fn can_split_hash() {
        let hash = "$2y$12$L6Bc/AlTQHyd9liGgGEZyOFLPHNgyxeEPfgYfBCVxJ7JIlwxyVU3u";
        let output = split_hash(hash).unwrap();
        // 22 chars of salt followed by 31 chars of digest.
        let expected = HashParts {
            cost: 12,
            salt: "L6Bc/AlTQHyd9liGgGEZyO".to_string(),
            hash: "FLPHNgyxeEPfgYfBCVxJ7JIlwxyVU3u".to_string(),
        };
        assert_eq!(output, expected);
    }
    #[test]
    fn can_output_cost_and_salt_from_parsed_hash() {
        let hash = "$2y$12$L6Bc/AlTQHyd9liGgGEZyOFLPHNgyxeEPfgYfBCVxJ7JIlwxyVU3u";
        let parsed = HashParts::from_str(hash).unwrap();
        assert_eq!(parsed.get_cost(), 12);
        assert_eq!(parsed.get_salt(), "L6Bc/AlTQHyd9liGgGEZyO".to_string());
    }
    // Interop checks: the hashes below were produced by other bcrypt
    // implementations and must verify against this one.
    #[test]
    fn can_verify_hash_generated_from_some_online_tool() {
        let hash = "$2a$04$UuTkLRZZ6QofpDOlMz32MuuxEHA43WOemOYHPz6.SjsVsyO1tDU96";
        assert!(verify("password", hash).unwrap());
    }
    #[test]
    fn can_verify_hash_generated_from_python() {
        let hash = "$2b$04$EGdrhbKUv8Oc9vGiXX0HQOxSg445d458Muh7DAHskb6QbtCvdxcie";
        assert!(verify("correctbatteryhorsestapler", hash).unwrap());
    }
    #[test]
    fn can_verify_hash_generated_from_node() {
        let hash = "$2a$04$n4Uy0eSnMfvnESYL.bLwuuj0U/ETSsoTpRT9GVk5bektyVVa5xnIi";
        assert!(verify("correctbatteryhorsestapler", hash).unwrap());
    }
    #[test]
    fn a_wrong_password_is_false() {
        let hash = "$2b$04$EGdrhbKUv8Oc9vGiXX0HQOxSg445d458Muh7DAHskb6QbtCvdxcie";
        assert!(!verify("wrong", hash).unwrap());
    }
    #[test]
    fn errors_with_invalid_hash() {
        // there is another $ in the hash part
        let hash = "$2a$04$n4Uy0eSnMfvnESYL.bLwuuj0U/ETSsoTpRT9GVk$5bektyVVa5xnIi";
        assert!(verify("correctbatteryhorsestapler", hash).is_err());
    }
    #[test]
    fn errors_with_non_number_cost() {
        // the cost is not a number
        let hash = "$2a$ab$n4Uy0eSnMfvnESYL.bLwuuj0U/ETSsoTpRT9GVk$5bektyVVa5xnIi";
        assert!(verify("correctbatteryhorsestapler", hash).is_err());
    }
    #[test]
    fn errors_with_a_hash_too_long() {
        // the salt+hash part is longer than the expected 53 characters
        let hash = "$2a$04$n4Uy0eSnMfvnESYL.bLwuuj0U/ETSsoTpRT9GVk$5bektyVVa5xnIerererereri";
        assert!(verify("correctbatteryhorsestapler", hash).is_err());
    }
    #[test]
    fn can_verify_own_generated() {
        let hashed = hash("hunter2", 4).unwrap();
        assert_eq!(true, verify("hunter2", &hashed).unwrap());
    }
    #[test]
    fn long_passwords_truncate_correctly() {
        // produced with python -c 'import bcrypt; bcrypt.hashpw(b"x"*100, b"$2a$05$...............................")'
        let hash = "$2a$05$......................YgIDy4hFBdVlc/6LHnD9mX488r9cLd2";
        assert!(verify(iter::repeat("x").take(100).collect::<String>(), hash).unwrap());
    }
    #[test]
    fn generate_versions() {
        // With an all-zero salt only the version prefix should differ
        // between the four formats.
        let password = "hunter2".as_bytes();
        let salt = vec![0; 16];
        let result = _hash_password(password, DEFAULT_COST, salt.as_slice()).unwrap();
        assert_eq!(
            "$2a$12$......................21jzCB1r6pN6rp5O2Ev0ejjTAboskKm",
            result.format_for_version(Version::TwoA)
        );
        assert_eq!(
            "$2b$12$......................21jzCB1r6pN6rp5O2Ev0ejjTAboskKm",
            result.format_for_version(Version::TwoB)
        );
        assert_eq!(
            "$2x$12$......................21jzCB1r6pN6rp5O2Ev0ejjTAboskKm",
            result.format_for_version(Version::TwoX)
        );
        assert_eq!(
            "$2y$12$......................21jzCB1r6pN6rp5O2Ev0ejjTAboskKm",
            result.format_for_version(Version::TwoY)
        );
        let hash = result.to_string();
        assert_eq!(true, verify("hunter2", &hash).unwrap());
    }
    #[test]
    fn forbid_null_bytes() {
        // NUL bytes must be rejected wherever they appear, since
        // _hash_password NUL-terminates its input internally.
        fn assert_invalid_password(password: &[u8]) {
            match hash(password, DEFAULT_COST) {
                Ok(_) => panic!(format!(
                    "NULL bytes must be forbidden, but {:?} is allowed.",
                    password
                )),
                Err(BcryptError::InvalidPassword) => {}
                Err(e) => panic!(format!(
                    "NULL bytes are forbidden but error differs: {} for {:?}.",
                    e, password
                )),
            }
        }
        assert_invalid_password("\0".as_bytes());
        assert_invalid_password("\0\0\0\0\0\0\0\0".as_bytes());
        assert_invalid_password("passw0rd\0".as_bytes());
        assert_invalid_password("passw0rd\0with tail".as_bytes());
        assert_invalid_password("\0passw0rd".as_bytes());
    }
    #[test]
    fn hash_with_fixed_salt() {
        let salt = vec![38, 113, 212, 141, 108, 213, 195, 166,
                        201, 38, 20, 13, 47, 40, 104, 18];
        let hashed = hash_with_salt("My S3cre7 P@55w0rd!", 5, &salt).unwrap().to_string();
        assert_eq!("$2y$05$HlFShUxTu4ZHHfOLJwfmCeDj/kuKFKboanXtDJXxCC7aIPTUgxNDe", &hashed);
    }
    // Property tests; NUL bytes are stripped first because `hash` rejects them.
    quickcheck! {
        fn can_verify_arbitrary_own_generated(pass: Vec<u8>) -> BcryptResult<bool> {
            let mut pass = pass;
            pass.retain(|&b| b != 0);
            let hashed = hash(&pass, 4)?;
            verify(pass, &hashed)
        }
        fn doesnt_verify_different_passwords(a: Vec<u8>, b: Vec<u8>) -> BcryptResult<TestResult> {
            let mut a = a;
            a.retain(|&b| b != 0);
            let mut b = b;
            b.retain(|&b| b != 0);
            if a == b {
                return Ok(TestResult::discard());
            }
            let hashed = hash(a, 4)?;
            Ok(TestResult::from_bool(!verify(b, &hashed)?))
        }
    }
}
// Test that bcrypt hash parsing returns an error if the hash is badly formatted
//! Easily hash and verify passwords using bcrypt
use rand::{rngs::OsRng, RngCore};
use std::convert::AsRef;
use std::fmt;
use std::str::FromStr;
mod b64;
mod bcrypt;
mod errors;
pub use crate::bcrypt::bcrypt;
pub use crate::errors::{BcryptError, BcryptResult};
// Cost constants: bcrypt only accepts a cost factor in [MIN_COST, MAX_COST]
// (enforced by `_hash_password`).
const MIN_COST: u32 = 4;
const MAX_COST: u32 = 31;
/// Cost used by `hash`/`hash_with_result` callers that take the library default.
pub const DEFAULT_COST: u32 = 12;
#[derive(Debug, PartialEq)]
/// A bcrypt hash result before concatenating
pub struct HashParts {
    /// Cost factor; validated to lie in `[MIN_COST, MAX_COST]`.
    cost: u32,
    /// Salt encoded with the bcrypt base64 alphabet (22 chars for 16 bytes).
    salt: String,
    /// Digest encoded with the bcrypt base64 alphabet (31 chars for 23 bytes).
    hash: String,
}
/// BCrypt hash version
/// https://en.wikipedia.org/wiki/Bcrypt#Versioning_history
pub enum Version {
    /// Formats with the `2a` prefix.
    TwoA,
    /// Formats with the `2x` prefix.
    TwoX,
    /// Formats with the `2y` prefix.
    TwoY,
    /// Formats with the `2b` prefix.
    TwoB,
}
impl HashParts {
    /// Builds the canonical hash string using the `2b` version prefix.
    fn format(self) -> String {
        self.format_for_version(Version::TwoB)
    }
    /// Returns the cost factor stored in this hash.
    pub fn get_cost(&self) -> u32 {
        self.cost
    }
    /// Returns a copy of the encoded salt.
    pub fn get_salt(&self) -> String {
        self.salt.to_owned()
    }
    /// Renders the full hash string under the requested version prefix.
    pub fn format_for_version(&self, version: Version) -> String {
        // The cost is always rendered as two digits (zero-padded below 10).
        format!("${v}${c:02}${s}{h}", v = version, c = self.cost, s = self.salt, h = self.hash)
    }
}
impl FromStr for HashParts {
    type Err = BcryptError;
    /// Parses a full bcrypt hash string such as
    /// `$2y$12$<22-char salt><31-char digest>`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        split_hash(s)
    }
}
impl ToString for HashParts {
fn to_string(&self) -> String {
self.format_for_version(Version::TwoY)
}
}
impl fmt::Display for Version {
    /// Writes the two-character version prefix (without the `$` delimiters).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(match *self {
            Version::TwoA => "2a",
            Version::TwoB => "2b",
            Version::TwoX => "2x",
            Version::TwoY => "2y",
        })
    }
}
/// The main meat: actually does the hashing and does some verification with
/// the cost to ensure it's a correct one
fn _hash_password(password: &[u8], cost: u32, salt: &[u8]) -> BcryptResult<HashParts> {
    if cost < MIN_COST || cost > MAX_COST {
        return Err(BcryptError::CostNotAllowed(cost));
    }
    // Embedded NUL bytes are rejected because bcrypt treats the password as
    // a NUL-terminated string.
    if password.iter().any(|&byte| byte == 0) {
        return Err(BcryptError::InvalidPassword);
    }
    // bcrypt operates on a NUL-terminated copy of the password, capped at
    // 72 bytes — `bcrypt` panics on longer input.
    let mut terminated = password.to_vec();
    terminated.push(0);
    terminated.truncate(72);
    // The raw digest is 24 bytes.
    let mut output = [0u8; 24];
    bcrypt::bcrypt(cost, salt, terminated.as_slice(), &mut output);
    Ok(HashParts {
        cost,
        salt: b64::encode(salt),
        // The final output byte is conventionally dropped before encoding.
        hash: b64::encode(&output[..23]),
    })
}
/// Takes a full hash and splits it into 3 parts:
/// cost, salt and hash
///
/// Rejects non-ASCII input outright: the bcrypt base64 alphabet is pure
/// ASCII, and the previous byte-index slicing (`[..22]`) could panic on a
/// crafted 53-byte string whose 22nd byte fell inside a multi-byte
/// character. Invalid input now always yields an `Err`, never a panic.
fn split_hash(hash: &str) -> BcryptResult<HashParts> {
    // Should be [prefix, cost, salt+hash]
    let raw_parts: Vec<_> = hash.split('$').filter(|s| !s.is_empty()).collect();
    if raw_parts.len() != 3 {
        return Err(BcryptError::InvalidHash(hash.to_string()));
    }
    if raw_parts[0] != "2y" && raw_parts[0] != "2b" && raw_parts[0] != "2a" {
        return Err(BcryptError::InvalidPrefix(raw_parts[0].to_string()));
    }
    let cost = raw_parts[1]
        .parse::<u32>()
        .map_err(|_| BcryptError::InvalidCost(raw_parts[1].to_string()))?;
    let salt_and_hash = raw_parts[2];
    // 22 chars of salt + 31 chars of digest; ASCII-only so the byte-index
    // slicing below cannot panic.
    if salt_and_hash.len() != 53 || !salt_and_hash.is_ascii() {
        return Err(BcryptError::InvalidHash(hash.to_string()));
    }
    Ok(HashParts {
        cost,
        salt: salt_and_hash[..22].to_string(),
        hash: salt_and_hash[22..].to_string(),
    })
}
/// Generates a password hash using the cost given.
/// The salt is generated randomly using the OS randomness
pub fn hash<P: AsRef<[u8]>>(password: P, cost: u32) -> BcryptResult<String> {
    let parts = hash_with_result(password, cost)?;
    Ok(parts.format())
}
/// Generates a password hash using the cost given.
/// The salt is generated randomly using the OS randomness.
/// The function returns a result structure and allows to format the hash in different versions.
pub fn hash_with_result<P: AsRef<[u8]>>(password: P, cost: u32) -> BcryptResult<HashParts> {
    // 16 bytes of OS-provided randomness for the salt.
    let mut salt = [0u8; 16];
    OsRng.fill_bytes(&mut salt);
    _hash_password(password.as_ref(), cost, &salt)
}
/// Generates a password hash from the given password, cost and caller-supplied salt.
/// (The previous doc wrongly described this as generating a password from a hash.)
/// The function returns a result structure and allows to format the hash in different versions.
pub fn hash_with_salt<P: AsRef<[u8]>>(password: P, cost: u32, salt: &[u8]) -> BcryptResult<HashParts> {
    _hash_password(password.as_ref(), cost, salt)
}
/// Verify that a password is equivalent to the hash provided
///
/// The two decoded digests are compared in constant time: instead of
/// returning on the first mismatching byte (which leaks, via timing, how
/// long a matching prefix an attacker has guessed), all bytes are always
/// examined and the differences accumulated.
pub fn verify<P: AsRef<[u8]>>(password: P, hash: &str) -> BcryptResult<bool> {
    let parts = split_hash(hash)?;
    let salt = b64::decode(&parts.salt)?;
    let generated = _hash_password(password.as_ref(), parts.cost, &salt)?;
    let source_decoded = b64::decode(&parts.hash)?;
    let generated_decoded = b64::decode(&generated.hash)?;
    if source_decoded.len() != generated_decoded.len() {
        return Ok(false);
    }
    // OR together the XOR of every byte pair; zero iff all bytes matched.
    let mut diff = 0u8;
    for (a, b) in source_decoded.into_iter().zip(generated_decoded) {
        diff |= a ^ b;
    }
    Ok(diff == 0)
}
#[cfg(test)]
mod tests {
    use super::{
        _hash_password, hash, hash_with_salt, split_hash, verify, BcryptError, BcryptResult, HashParts, Version,
        DEFAULT_COST,
    };
    use quickcheck::{quickcheck, TestResult};
    use std::iter;
    use std::str::FromStr;
    #[test]
    fn can_split_hash() {
        let hash = "$2y$12$L6Bc/AlTQHyd9liGgGEZyOFLPHNgyxeEPfgYfBCVxJ7JIlwxyVU3u";
        let output = split_hash(hash).unwrap();
        // 22 chars of salt followed by 31 chars of digest.
        let expected = HashParts {
            cost: 12,
            salt: "L6Bc/AlTQHyd9liGgGEZyO".to_string(),
            hash: "FLPHNgyxeEPfgYfBCVxJ7JIlwxyVU3u".to_string(),
        };
        assert_eq!(output, expected);
    }
    #[test]
    fn can_output_cost_and_salt_from_parsed_hash() {
        let hash = "$2y$12$L6Bc/AlTQHyd9liGgGEZyOFLPHNgyxeEPfgYfBCVxJ7JIlwxyVU3u";
        let parsed = HashParts::from_str(hash).unwrap();
        assert_eq!(parsed.get_cost(), 12);
        assert_eq!(parsed.get_salt(), "L6Bc/AlTQHyd9liGgGEZyO".to_string());
    }
    #[test]
    fn returns_an_error_if_a_parsed_hash_is_baddly_formated() {
        // salt+hash part is shorter than the expected 53 characters
        let hash1 = "$2y$12$L6Bc/AlTQHyd9lGEZyOFLPHNgyxeEPfgYfBCVxJ7JIlwxyVU3u";
        assert!(HashParts::from_str(hash1).is_err());
        // prefix is not a recognised bcrypt identifier
        let hash2 = "!2y$12$L6Bc/AlTQHyd9liGgGEZyOFLPHNgyxeEPfgYfBCVxJ7JIlwxyVU3u";
        assert!(HashParts::from_str(hash2).is_err());
        // negative cost cannot be parsed as u32
        let hash3 = "$2y$-12$L6Bc/AlTQHyd9liGgGEZyOFLPHNgyxeEPfgYfBCVxJ7JIlwxyVU3u";
        assert!(HashParts::from_str(hash3).is_err());
    }
    // Interop checks: the hashes below were produced by other bcrypt
    // implementations and must verify against this one.
    #[test]
    fn can_verify_hash_generated_from_some_online_tool() {
        let hash = "$2a$04$UuTkLRZZ6QofpDOlMz32MuuxEHA43WOemOYHPz6.SjsVsyO1tDU96";
        assert!(verify("password", hash).unwrap());
    }
    #[test]
    fn can_verify_hash_generated_from_python() {
        let hash = "$2b$04$EGdrhbKUv8Oc9vGiXX0HQOxSg445d458Muh7DAHskb6QbtCvdxcie";
        assert!(verify("correctbatteryhorsestapler", hash).unwrap());
    }
    #[test]
    fn can_verify_hash_generated_from_node() {
        let hash = "$2a$04$n4Uy0eSnMfvnESYL.bLwuuj0U/ETSsoTpRT9GVk5bektyVVa5xnIi";
        assert!(verify("correctbatteryhorsestapler", hash).unwrap());
    }
    #[test]
    fn a_wrong_password_is_false() {
        let hash = "$2b$04$EGdrhbKUv8Oc9vGiXX0HQOxSg445d458Muh7DAHskb6QbtCvdxcie";
        assert!(!verify("wrong", hash).unwrap());
    }
    #[test]
    fn errors_with_invalid_hash() {
        // there is another $ in the hash part
        let hash = "$2a$04$n4Uy0eSnMfvnESYL.bLwuuj0U/ETSsoTpRT9GVk$5bektyVVa5xnIi";
        assert!(verify("correctbatteryhorsestapler", hash).is_err());
    }
    #[test]
    fn errors_with_non_number_cost() {
        // the cost is not a number
        let hash = "$2a$ab$n4Uy0eSnMfvnESYL.bLwuuj0U/ETSsoTpRT9GVk$5bektyVVa5xnIi";
        assert!(verify("correctbatteryhorsestapler", hash).is_err());
    }
    #[test]
    fn errors_with_a_hash_too_long() {
        // the salt+hash part is longer than the expected 53 characters
        let hash = "$2a$04$n4Uy0eSnMfvnESYL.bLwuuj0U/ETSsoTpRT9GVk$5bektyVVa5xnIerererereri";
        assert!(verify("correctbatteryhorsestapler", hash).is_err());
    }
    #[test]
    fn can_verify_own_generated() {
        let hashed = hash("hunter2", 4).unwrap();
        assert_eq!(true, verify("hunter2", &hashed).unwrap());
    }
    #[test]
    fn long_passwords_truncate_correctly() {
        // produced with python -c 'import bcrypt; bcrypt.hashpw(b"x"*100, b"$2a$05$...............................")'
        let hash = "$2a$05$......................YgIDy4hFBdVlc/6LHnD9mX488r9cLd2";
        assert!(verify(iter::repeat("x").take(100).collect::<String>(), hash).unwrap());
    }
    #[test]
    fn generate_versions() {
        // With an all-zero salt only the version prefix should differ
        // between the four formats.
        let password = "hunter2".as_bytes();
        let salt = vec![0; 16];
        let result = _hash_password(password, DEFAULT_COST, salt.as_slice()).unwrap();
        assert_eq!(
            "$2a$12$......................21jzCB1r6pN6rp5O2Ev0ejjTAboskKm",
            result.format_for_version(Version::TwoA)
        );
        assert_eq!(
            "$2b$12$......................21jzCB1r6pN6rp5O2Ev0ejjTAboskKm",
            result.format_for_version(Version::TwoB)
        );
        assert_eq!(
            "$2x$12$......................21jzCB1r6pN6rp5O2Ev0ejjTAboskKm",
            result.format_for_version(Version::TwoX)
        );
        assert_eq!(
            "$2y$12$......................21jzCB1r6pN6rp5O2Ev0ejjTAboskKm",
            result.format_for_version(Version::TwoY)
        );
        let hash = result.to_string();
        assert_eq!(true, verify("hunter2", &hash).unwrap());
    }
    #[test]
    fn forbid_null_bytes() {
        // NUL bytes must be rejected wherever they appear, since
        // _hash_password NUL-terminates its input internally.
        fn assert_invalid_password(password: &[u8]) {
            match hash(password, DEFAULT_COST) {
                Ok(_) => panic!(format!(
                    "NULL bytes must be forbidden, but {:?} is allowed.",
                    password
                )),
                Err(BcryptError::InvalidPassword) => {}
                Err(e) => panic!(format!(
                    "NULL bytes are forbidden but error differs: {} for {:?}.",
                    e, password
                )),
            }
        }
        assert_invalid_password("\0".as_bytes());
        assert_invalid_password("\0\0\0\0\0\0\0\0".as_bytes());
        assert_invalid_password("passw0rd\0".as_bytes());
        assert_invalid_password("passw0rd\0with tail".as_bytes());
        assert_invalid_password("\0passw0rd".as_bytes());
    }
    #[test]
    fn hash_with_fixed_salt() {
        let salt = vec![38, 113, 212, 141, 108, 213, 195, 166,
                        201, 38, 20, 13, 47, 40, 104, 18];
        let hashed = hash_with_salt("My S3cre7 P@55w0rd!", 5, &salt).unwrap().to_string();
        assert_eq!("$2y$05$HlFShUxTu4ZHHfOLJwfmCeDj/kuKFKboanXtDJXxCC7aIPTUgxNDe", &hashed);
    }
    // Property tests; NUL bytes are stripped first because `hash` rejects them.
    quickcheck! {
        fn can_verify_arbitrary_own_generated(pass: Vec<u8>) -> BcryptResult<bool> {
            let mut pass = pass;
            pass.retain(|&b| b != 0);
            let hashed = hash(&pass, 4)?;
            verify(pass, &hashed)
        }
        fn doesnt_verify_different_passwords(a: Vec<u8>, b: Vec<u8>) -> BcryptResult<TestResult> {
            let mut a = a;
            a.retain(|&b| b != 0);
            let mut b = b;
            b.retain(|&b| b != 0);
            if a == b {
                return Ok(TestResult::discard());
            }
            let hashed = hash(a, 4)?;
            Ok(TestResult::from_bool(!verify(b, &hashed)?))
        }
    }
}
// ---------------------------------------------------------------------------
//! Client library for the [Discord](https://discordapp.com) API.
//!
//! The Discord API can be divided into three main components: the RESTful API
//! to which calls can be made to take actions, a websocket-based permanent
//! connection over which state updates are received, and the voice calling
//! system.
//!
//! Log in to Discord with `Discord::new`, `new_cache`, or `from_bot_token` as appropriate.
//! The resulting value can be used to make REST API calls to post messages and manipulate Discord
//! state. Calling `connect()` will open a websocket connection, through which events can be
//! received. These two channels are enough to write a simple chatbot which can
//! read and respond to messages.
//!
//! For more in-depth tracking of Discord state, a `State` can be seeded with
//! the `ReadyEvent` obtained when opening a `Connection` and kept updated with
//! the events received over it.
//!
#![cfg_attr(not(feature="voice"), doc="*<b>NOTE</b>: The library has been compiled without voice support.*")]
//! To join voice servers, call `Connection::voice` to get a `VoiceConnection` and use `connect`
//! to join a channel, then `play` and `stop` to control playback. Manipulating deaf/mute state
//! and receiving audio are also possible.
//!
//! For examples, see the `examples` directory in the source tree.
#![warn(missing_docs)]
extern crate hyper;
extern crate serde_json;
extern crate websocket;
#[macro_use]
extern crate bitflags;
extern crate byteorder;
#[cfg(feature="voice")]
extern crate opus;
extern crate time;
#[macro_use]
extern crate log;
#[cfg(feature="voice")]
extern crate sodiumoxide;
extern crate multipart;
extern crate base64;
use std::collections::BTreeMap;
use serde_json::builder::ObjectBuilder;
mod error;
mod connection;
mod state;
#[cfg(feature="voice")]
pub mod voice;
pub mod model;
pub use error::{Result, Error};
pub use connection::Connection;
pub use state::{State, ChannelRef};
use model::*;
const USER_AGENT: &'static str = concat!("DiscordBot (https://github.com/SpaceManiac/discord-rs, ", env!("CARGO_PKG_VERSION"), ")");
const API_BASE: &'static str = "https://discordapp.com/api/v6";
const STATUS_BASE: &'static str = "https://status.discordapp.com";
/// Client for the Discord REST API.
///
/// Log in to the API with a user's email and password using `new()`. Call
/// `connect()` to create a `Connection` on which to receive events. If desired,
/// use `logout()` to invalidate the token when done. Other methods manipulate
/// the Discord REST API.
pub struct Discord {
    // HTTP client reused for every REST call.
    client: hyper::Client,
    // Authorization token sent on every request; for bots this is
    // "Bot <token>" (see `from_bot_token`).
    token: String,
}
impl Discord {
/// Log in to the Discord Rest API and acquire a token.
pub fn new(email: &str, password: &str) -> Result<Discord> {
    let mut map = BTreeMap::new();
    map.insert("email", email);
    map.insert("password", password);
    let client = hyper::Client::new();
    // POST the credentials as a JSON body to /auth/login.
    let response = try!(check_status(client.post(&format!("{}/auth/login", API_BASE))
        .header(hyper::header::ContentType::json())
        .header(hyper::header::UserAgent(USER_AGENT.to_owned()))
        .body(&try!(serde_json::to_string(&map)))
        .send()));
    // The response is expected to be a flat JSON object containing "token".
    let mut json: BTreeMap<String, String> = try!(serde_json::from_reader(response));
    let token = match json.remove("token") {
        Some(token) => token,
        None => return Err(Error::Protocol("Response missing \"token\" in Discord::new()"))
    };
    Ok(Discord {
        client: client,
        token: token,
    })
}
/// Log in to the Discord Rest API, possibly using a cached login token.
///
/// Cached login tokens are keyed to the email address and will be read from
/// and written to the specified path. If no cached token was found and no
/// password was specified, an error is returned.
pub fn new_cache<P: AsRef<std::path::Path>>(path: P, email: &str, password: Option<&str>) -> Result<Discord> {
    use std::io::{Write, BufRead, BufReader};
    use std::fs::File;
    // Read the cache, looking for our token.
    // Cache file format: one "<email>\t<token>" entry per line.
    let path = path.as_ref();
    let mut initial_token: Option<String> = None;
    if let Ok(file) = File::open(path) {
        for line in BufReader::new(file).lines() {
            let line = try!(line);
            let parts: Vec<_> = line.split('\t').collect();
            if parts.len() == 2 && parts[0] == email {
                initial_token = Some(parts[1].trim().into());
                break;
            }
        }
    }
    // Perform the login
    let discord = if let Some(ref initial_token) = initial_token {
        // A cached token exists: still hit /auth/login, passing the token in
        // the Authorization header. NOTE(review): this appears intended to
        // refresh/revalidate the cached token — confirm against the API.
        let mut map = BTreeMap::new();
        map.insert("email", email);
        if let Some(password) = password {
            map.insert("password", password);
        }
        let client = hyper::Client::new();
        let response = try!(check_status(client.post(&format!("{}/auth/login", API_BASE))
            .header(hyper::header::ContentType::json())
            .header(hyper::header::UserAgent(USER_AGENT.to_owned()))
            .header(hyper::header::Authorization(initial_token.clone()))
            .body(&try!(serde_json::to_string(&map)))
            .send()));
        let mut json: BTreeMap<String, String> = try!(serde_json::from_reader(response));
        let token = match json.remove("token") {
            Some(token) => token,
            None => return Err(Error::Protocol("Response missing \"token\" in Discord::new()"))
        };
        Discord {
            client: client,
            token: token,
        }
    } else {
        if let Some(password) = password {
            try!(Discord::new(email, password))
        } else {
            return Err(Error::Other("No password was specified and no cached token was found"))
        }
    };
    // Write the token back out, if needed: keep every other account's entry,
    // replacing only the line for this email.
    if initial_token.as_ref() != Some(&discord.token) {
        let mut tokens = Vec::new();
        tokens.push(format!("{}\t{}", email, discord.token));
        if let Ok(file) = File::open(path) {
            for line in BufReader::new(file).lines() {
                let line = try!(line);
                if line.split('\t').next() != Some(email) {
                    tokens.push(line);
                }
            }
        }
        let mut file = try!(File::create(path));
        for line in tokens {
            try!(file.write_all(line.as_bytes()));
            try!(file.write_all(&[b'\n']));
        }
    }
    Ok(discord)
}
/// Log in as a bot account using the given authentication token.
///
/// The raw token is automatically prefixed with `"Bot "` as the API expects.
pub fn from_bot_token(token: &str) -> Result<Discord> {
    let discord = Discord {
        client: hyper::Client::new(),
        token: format!("Bot {}", token),
    };
    Ok(discord)
}
/// Log out from the Discord API, invalidating this clients's token.
// Consumes `self`: the client is unusable after the token is invalidated.
pub fn logout(self) -> Result<()> {
    try!(self.request(|| self.client.post(&format!("{}/auth/logout", API_BASE))));
    Ok(())
}
// Internal helper: builds a request via the closure, attaches the JSON
// content type and the stored auth token, and hands it to `retry`.
// NOTE(review): `retry` is defined elsewhere in this file; it presumably
// re-invokes the closure to rebuild the request on failure — confirm.
fn request<'a, F: Fn() -> hyper::client::RequestBuilder<'a>>(&self, f: F) -> Result<hyper::client::Response> {
    retry(|| f()
        .header(hyper::header::ContentType::json())
        .header(hyper::header::Authorization(self.token.clone())))
}
/// Create a channel.
pub fn create_channel(&self, server: &ServerId, name: &str, kind: ChannelType) -> Result<Channel> {
    // POST {"name": ..., "type": ...} to the guild's channel collection.
    let map = ObjectBuilder::new()
        .insert("name", name)
        .insert("type", kind.name())
        .build();
    let body = try!(serde_json::to_string(&map));
    let response = try!(self.request(||
        self.client.post(&format!("{}/guilds/{}/channels", API_BASE, server.0)).body(&body)));
    Channel::decode(try!(serde_json::from_reader(response)))
}
/// Get information about a channel.
pub fn get_channel(&self, channel: ChannelId) -> Result<Channel> {
    let response = try!(self.request(||
        self.client.get(&format!("{}/channels/{}", API_BASE, channel.0))));
    Channel::decode(try!(serde_json::from_reader(response)))
}
/// Edit a channel's details. See `EditChannel` for the editable fields.
///
/// ```ignore
/// // Edit a channel's name and topic
/// discord.edit_channel(channel_id, "general", |ch| ch
/// .topic("Welcome to the general chat!")
/// );
/// ```
pub fn edit_channel<F: FnOnce(EditChannel) -> EditChannel>(&self, channel: ChannelId, f: F) -> Result<PublicChannel> {
// Work around the fact that this supposed PATCH call actually requires all fields
let map = match try!(self.get_channel(channel)) {
Channel::Private(_) => return Err(Error::Other("Can not edit private channels")),
Channel::Public(channel) => {
let map = ObjectBuilder::new()
.insert("name", channel.name)
.insert("position", channel.position);
match channel.kind {
ChannelType::Text => map.insert("topic", channel.topic),
ChannelType::Voice => map.insert("bitrate", channel.bitrate).insert("user_limit", channel.user_limit),
_ => return Err(Error::Other(stringify!(format!("Unreachable channel type: {:?}", channel.kind)))),
}
},
Channel::Group(group) => ObjectBuilder::new().insert("name", group.name),
};
let map = f(EditChannel(map)).0.build();
let body = try!(serde_json::to_string(&map));
let response = try!(self.request(||
self.client.patch(&format!("{}/channels/{}", API_BASE, channel.0)).body(&body)));
PublicChannel::decode(try!(serde_json::from_reader(response)))
}
/// Delete a channel.
// The API returns the deleted channel object, which is decoded and returned.
pub fn delete_channel(&self, channel: &ChannelId) -> Result<Channel> {
    let response = try!(self.request(||
        self.client.delete(&format!("{}/channels/{}", API_BASE, channel.0))));
    Channel::decode(try!(serde_json::from_reader(response)))
}
/// Indicate typing on a channel for the next 5 seconds.
pub fn broadcast_typing(&self, channel: &ChannelId) -> Result<()> {
    try!(self.request(|| self.client.post(&format!("{}/channels/{}/typing", API_BASE, channel.0))));
    Ok(())
}
/// Get a single message by ID from a given channel.
pub fn get_message(&self, channel: ChannelId, message: MessageId) -> Result<Message> {
    let response = try!(self.request(|| self.client.get(&format!("{}/channels/{}/messages/{}", API_BASE, channel.0, message.0))));
    Message::decode(try!(serde_json::from_reader(response)))
}
/// Get messages in the backlog for a given channel.
///
/// The `what` argument should be one of the options in the `GetMessages`
/// enum, and will determine which messages will be returned. A message
/// limit can also be specified, and defaults to 50. More recent messages
/// will appear first in the list.
pub fn get_messages(&self, channel: ChannelId, what: GetMessages, limit: Option<u64>) -> Result<Vec<Message>> {
    use std::fmt::Write;
    let mut url = format!("{}/channels/{}/messages?limit={}", API_BASE, channel.0, limit.unwrap_or(50));
    // Map the anchor variant to its query parameter, if any.
    let anchor = match what {
        GetMessages::MostRecent => None,
        GetMessages::Before(id) => Some(("before", id.0)),
        GetMessages::After(id) => Some(("after", id.0)),
        GetMessages::Around(id) => Some(("around", id.0)),
    };
    if let Some((key, id)) = anchor {
        let _ = write!(url, "&{}={}", key, id);
    }
    let response = try!(self.request(|| self.client.get(&url)));
    decode_array(try!(serde_json::from_reader(response)), Message::decode)
}
/// Gets the pinned messages for a given channel.
pub fn get_pinned_messages(&self, channel: ChannelId) -> Result<Vec<Message>> {
    let response = try!(self.request(|| self.client.get(
        &format!("{}/channels/{}/pins", API_BASE, channel.0))));
    let value = try!(serde_json::from_reader(response));
    decode_array(value, Message::decode)
}
/// Pin the given message to the given channel.
///
/// Requires that the logged in account have the "MANAGE_MESSAGES" permission.
pub fn pin_message(&self, channel: ChannelId, message: MessageId) -> Result<()> {
    try!(self.request(|| self.client.put(
        &format!("{}/channels/{}/pins/{}", API_BASE, channel.0, message.0))));
    Ok(())
}
/// Removes the given message from being pinned to the given channel.
///
/// Requires that the logged in account have the "MANAGE_MESSAGES" permission.
pub fn unpin_message(&self, channel: ChannelId, message: MessageId) -> Result<()> {
    try!(self.request(|| self.client.delete(
        &format!("{}/channels/{}/pins/{}", API_BASE, channel.0, message.0))));
    Ok(())
}
/// Send a message to a given channel.
///
/// The `nonce` will be returned in the result and also transmitted to other
/// clients. The empty string is a good default if you don't care.
pub fn send_message(&self, channel: &ChannelId, text: &str, nonce: &str, tts: bool) -> Result<Message> {
    let map = ObjectBuilder::new()
        .insert("content", text)
        .insert("nonce", nonce)
        .insert("tts", tts)
        .build();
    let body = try!(serde_json::to_string(&map));
    let response = try!(self.request(||
        self.client.post(&format!("{}/channels/{}/messages", API_BASE, channel.0)).body(&body)));
    Message::decode(try!(serde_json::from_reader(response)))
}
/// Edit a previously posted message.
///
/// Requires that either the message was posted by this user, or this user
/// has permission to manage other members' messages.
pub fn edit_message(&self, channel: &ChannelId, message: &MessageId, text: &str) -> Result<Message> {
    let map = ObjectBuilder::new()
        .insert("content", text)
        .build();
    let body = try!(serde_json::to_string(&map));
    let response = try!(self.request(||
        self.client.patch(&format!("{}/channels/{}/messages/{}", API_BASE, channel.0, message.0)).body(&body)));
    Message::decode(try!(serde_json::from_reader(response)))
}
/// Delete a previously posted message.
///
/// Requires that either the message was posted by this user, or this user
/// has permission to manage other members' messages.
pub fn delete_message(&self, channel: &ChannelId, message: &MessageId) -> Result<()> {
    try!(self.request(||
        self.client.delete(&format!("{}/channels/{}/messages/{}", API_BASE, channel.0, message.0))));
    Ok(())
}
/// Bulk deletes a list of `MessageId`s from a given channel.
///
/// A minimum of 2 unique messages and a maximum of 100 unique messages may
/// be supplied, otherwise an `Error::Other` will be returned.
///
/// Each MessageId *should* be unique as duplicates will be removed from the
/// array before being sent to the Discord API.
///
/// Only bots can use this endpoint. Regular user accounts can not use this
/// endpoint under any circumstance.
///
/// Requires that either the message was posted by this user, or this user
/// has permission to manage other members' messages.
pub fn delete_messages(&self, channel: ChannelId, messages: &[MessageId]) -> Result<()> {
    // Collect the raw u64 snowflakes and strip duplicates (sort + dedup)
    // before sending them to the API.
    let mut unique_ids: Vec<u64> = messages.iter().map(|message| message.0).collect();
    unique_ids.sort();
    unique_ids.dedup();
    if unique_ids.len() < 2 {
        return Err(Error::Other("A minimum of 2 message ids must be supplied"));
    }
    if unique_ids.len() > 100 {
        return Err(Error::Other("A maximum of 100 message ids may be supplied"));
    }
    let payload = ObjectBuilder::new()
        .insert("messages", unique_ids)
        .build();
    let body = try!(serde_json::to_string(&payload));
    try!(self.request(|| self.client.post(
        &format!("{}/channels/{}/messages/bulk_delete", API_BASE, channel.0)).body(&body)));
    Ok(())
}
/// Send a file attached to a message on a given channel.
///
/// The `text` is allowed to be empty, but the filename must always be specified.
// This bypasses the `request`/`retry` helper because the multipart body
// cannot be rebuilt by a closure; the request is built and sent once.
pub fn send_file<R: ::std::io::Read>(&self, channel: &ChannelId, text: &str, mut file: R, filename: &str) -> Result<Message> {
    let url = match hyper::Url::parse(&format!("{}/channels/{}/messages", API_BASE, channel.0)) {
        Ok(url) => url,
        Err(_) => return Err(Error::Other("Invalid URL in send_file"))
    };
    let mut request = try!(hyper::client::Request::new(hyper::method::Method::Post, url));
    request.headers_mut().set(hyper::header::Authorization(self.token.clone()));
    request.headers_mut().set(hyper::header::UserAgent(USER_AGENT.to_owned()));
    // Wrap the request in a multipart writer: "content" text field plus the
    // streamed "file" part.
    let mut request = try!(multipart::client::Multipart::from_request(request));
    try!(request.write_text("content", text));
    try!(request.write_stream("file", &mut file, Some(filename), None));
    Message::decode(try!(serde_json::from_reader(try!(request.send()))))
}
/// Acknowledge this message as "read" by this client.
pub fn ack_message(&self, channel: &ChannelId, message: &MessageId) -> Result<()> {
    try!(self.request(||
        self.client.post(&format!("{}/channels/{}/messages/{}/ack", API_BASE, channel.0, message.0))));
    Ok(())
}
/// Create permissions for a `Channel` for a `Member` or `Role`.
///
/// # Examples
///
/// An example of creating channel role permissions for a `Member`:
///
/// ```ignore
/// use discord::model::{PermissionOverwriteType, permissions};
///
/// // Assuming that a `Discord` instance, member, and channel have already
/// // been defined previously.
/// let target = PermissionOverwrite {
///     kind: PermissionOverwriteType::Member(member.user.id),
///     allow: permissions::VOICE_CONNECT | permissions::VOICE_SPEAK,
///     deny: permissions::VOICE_MUTE_MEMBERS | permissions::VOICE_MOVE_MEMBERS,
/// };
/// let result = discord.create_permission(channel.id, target);
/// ```
///
/// The same can similarly be accomplished for a `Role`:
///
/// ```ignore
/// use discord::model::{PermissionOverwriteType, permissions};
///
/// // Assuming that a `Discord` instance, role, and channel have already
/// // been defined previously.
/// let target = PermissionOverwrite {
///     kind: PermissionOverwriteType::Role(role.id),
///     allow: permissions::VOICE_CONNECT | permissions::VOICE_SPEAK,
///     deny: permissions::VOICE_MUTE_MEMBERS | permissions::VOICE_MOVE_MEMBERS,
/// };
/// let result = discord.create_permission(channel.id, target);
/// ```
pub fn create_permission(&self, channel: ChannelId, target: PermissionOverwrite) -> Result<()> {
    // The API distinguishes member vs role overwrites via a "type" string.
    let (id, kind) = match target.kind {
        PermissionOverwriteType::Member(id) => (id.0, "member"),
        PermissionOverwriteType::Role(id) => (id.0, "role"),
    };
    let map = ObjectBuilder::new()
        .insert("id", id)
        .insert("allow", target.allow.bits())
        .insert("deny", target.deny.bits())
        .insert("type", kind)
        .build();
    let body = try!(serde_json::to_string(&map));
    try!(self.request(|| self.client.put(
        &format!("{}/channels/{}/permissions/{}", API_BASE, channel.0, id)).body(&body)));
    Ok(())
}
/// Delete a `Member` or `Role`'s permissions for a `Channel`.
///
/// # Examples
///
/// Delete a `Member`'s permissions for a `Channel`:
///
/// ```ignore
/// use discord::model::PermissionOverwriteType;
///
/// // Assuming that a `Discord` instance, channel, and member have already
/// // been previously defined.
/// let target = PermissionOverwriteType::Member(member.user.id);
/// let response = discord.delete_permission(channel.id, target);
/// ```
///
/// The same can be accomplished for a `Role` similarly:
///
/// ```ignore
/// use discord::model::PermissionOverwriteType;
///
/// // Assuming that a `Discord` instance, channel, and role have already
/// // been previously defined.
/// let target = PermissionOverwriteType::Role(role.id);
/// let response = discord.delete_permission(channel.id, target);
/// ```
pub fn delete_permission(&self, channel: ChannelId, permission_type: PermissionOverwriteType) -> Result<()> {
    // Member and role overwrites share the same DELETE endpoint; only the
    // snowflake id differs.
    let id = match permission_type {
        PermissionOverwriteType::Member(id) => id.0,
        PermissionOverwriteType::Role(id) => id.0,
    };
    try!(self.request(|| self.client.delete(
        &format!("{}/channels/{}/permissions/{}", API_BASE, channel.0, id))));
    Ok(())
}
/// Get the list of servers this user knows about.
pub fn get_servers(&self) -> Result<Vec<ServerInfo>> {
let response = try!(self.request(||
self.client.get(&format!("{}/users/@me/guilds", API_BASE))));
decode_array(try!(serde_json::from_reader(response)), ServerInfo::decode)
}
/// Create a new server with the given name.
pub fn create_server(&self, name: &str, region: &str, icon: Option<&str>) -> Result<Server> {
let map = ObjectBuilder::new()
.insert("name", name)
.insert("region", region)
.insert("icon", icon)
.build();
let body = try!(serde_json::to_string(&map));
let response = try!(self.request(||
self.client.post(&format!("{}/guilds", API_BASE)).body(&body)));
Server::decode(try!(serde_json::from_reader(response)))
}
/// Edit a server's information. See `EditServer` for the editable fields.
///
/// ```ignore
/// // Rename a server
/// discord.edit_server(server_id, |server| server.name("My Cool Server"));
/// // Edit many properties at once
/// discord.edit_server(server_id, |server| server
/// .name("My Cool Server")
/// .icon(Some("data:image/jpg;base64,..."))
/// .afk_timeout(300)
/// .region("us-south")
/// );
/// ```
pub fn edit_server<F: FnOnce(EditServer) -> EditServer>(&self, server_id: ServerId, f: F) -> Result<Server> {
let map = f(EditServer(ObjectBuilder::new())).0.build();
let body = try!(serde_json::to_string(&map));
let response = try!(self.request(||
self.client.patch(&format!("{}/guilds/{}", API_BASE, server_id.0)).body(&body)));
Server::decode(try!(serde_json::from_reader(response)))
}
/// Leave the given server.
pub fn leave_server(&self, server: &ServerId) -> Result<Server> {
let response = try!(self.request(||
self.client.delete(&format!("{}/users/@me/guilds/{}", API_BASE, server.0))));
Server::decode(try!(serde_json::from_reader(response)))
}
/// Delete the given server. Only available to the server owner.
pub fn delete_server(&self, server: &ServerId) -> Result<Server> {
let response = try!(self.request(||
self.client.delete(&format!("{}/guilds/{}", API_BASE, server.0))));
Server::decode(try!(serde_json::from_reader(response)))
}
/// Get the ban list for the given server.
pub fn get_bans(&self, server: &ServerId) -> Result<Vec<User>> {
let response = try!(self.request(||
self.client.get(&format!("{}/guilds/{}/bans", API_BASE, server.0))));
decode_array(try!(serde_json::from_reader(response)), User::decode_ban)
}
/// Ban a user from the server, optionally deleting their recent messages.
///
/// Zero may be passed for `delete_message_days` if no deletion is desired.
pub fn add_ban(&self, server: &ServerId, user: &UserId, delete_message_days: u32) -> Result<()> {
try!(self.request(|| self.client.put(
&format!("{}/guilds/{}/bans/{}?delete_message_days={}", API_BASE, server.0, user.0, delete_message_days))));
Ok(())
}
/// Unban a user from the server.
pub fn remove_ban(&self, server: &ServerId, user: &UserId) -> Result<()> {
try!(self.request(|| self.client.delete(
&format!("{}/guilds/{}/bans/{}", API_BASE, server.0, user.0))));
Ok(())
}
/// Extract information from an invite.
///
/// The invite should either be a URL of the form `http://discord.gg/CODE`,
/// or a string containing just the `CODE`.
pub fn get_invite(&self, invite: &str) -> Result<Invite> {
let invite = resolve_invite(invite);
let response = try!(self.request(||
self.client.get(&format!("{}/invite/{}", API_BASE, invite))));
Invite::decode(try!(serde_json::from_reader(response)))
}
/// Get the active invites for a server.
pub fn get_server_invites(&self, server: ServerId) -> Result<Vec<RichInvite>> {
let response = try!(self.request(||
self.client.get(&format!("{}/guilds/{}/invites", API_BASE, server.0))));
decode_array(try!(serde_json::from_reader(response)), RichInvite::decode)
}
/// Get the active invites for a channel.
pub fn get_channel_invites(&self, channel: ChannelId) -> Result<Vec<RichInvite>> {
let response = try!(self.request(||
self.client.get(&format!("{}/channels/{}/invites", API_BASE, channel.0))));
decode_array(try!(serde_json::from_reader(response)), RichInvite::decode)
}
/// Accept an invite. See `get_invite` for details.
pub fn accept_invite(&self, invite: &str) -> Result<Invite> {
let invite = resolve_invite(invite);
let response = try!(self.request(||
self.client.post(&format!("{}/invite/{}", API_BASE, invite))));
Invite::decode(try!(serde_json::from_reader(response)))
}
/// Create an invite to a channel.
///
/// Passing 0 for `max_age` or `max_uses` means no limit. `max_age` should
/// be specified in seconds.
pub fn create_invite(&self, channel: ChannelId,
max_age: u64, max_uses: u64,
temporary: bool
) -> Result<RichInvite> {
let map = ObjectBuilder::new()
.insert("validate", serde_json::Value::Null)
.insert("max_age", max_age)
.insert("max_uses", max_uses)
.insert("temporary", temporary)
.build();
let body = try!(serde_json::to_string(&map));
let response = try!(self.request(||
self.client.post(&format!("{}/channels/{}/invites", API_BASE, channel.0)).body(&body)));
RichInvite::decode(try!(serde_json::from_reader(response)))
}
/// Delete an invite. See `get_invite` for details.
pub fn delete_invite(&self, invite: &str) -> Result<Invite> {
let invite = resolve_invite(invite);
let response = try!(self.request(||
self.client.delete(&format!("{}/invite/{}", API_BASE, invite))));
Invite::decode(try!(serde_json::from_reader(response)))
}
/// Retrieve a member object for a server given the member's user id.
pub fn get_member(&self, server: ServerId, user: UserId) -> Result<Member> {
let response = try!(self.request(|| self.client.get(
&format!("{}/guilds/{}/members/{}", API_BASE, server.0, user.0))));
Member::decode(try!(serde_json::from_reader(response)))
}
/// Edit the list of roles assigned to a member of a server.
pub fn edit_member_roles(&self, server: &ServerId, user: &UserId, roles: &[RoleId]) -> Result<()> {
    // Delegate to the general member-editing call with a roles-only patch.
    self.edit_member(*server, *user, |member| member.roles(roles))
}
/// Edit member information, including roles, nickname, and voice state.
///
/// See the `EditMember` struct for the editable fields.
pub fn edit_member<F: FnOnce(EditMember) -> EditMember>(&self, server: ServerId, user: UserId, f: F) -> Result<()> {
let map = f(EditMember(ObjectBuilder::new())).0.build();
let body = try!(serde_json::to_string(&map));
try!(self.request(|| self.client.patch(
&format!("{}/guilds/{}/members/{}", API_BASE, server.0, user.0)).body(&body)));
Ok(())
}
/// Kick a member from a server.
pub fn kick_member(&self, server: &ServerId, user: &UserId) -> Result<()> {
try!(self.request(|| self.client.delete(
&format!("{}/guilds/{}/members/{}", API_BASE, server.0, user.0))));
Ok(())
}
// Create role
// Edit role
// Reorder roles
// Delete roles
/// Create a private channel with the given user, or return the existing
/// one if it exists.
pub fn create_private_channel(&self, recipient: &UserId) -> Result<PrivateChannel> {
let map = ObjectBuilder::new()
.insert("recipient_id", &recipient.0)
.build();
let body = try!(serde_json::to_string(&map));
let response = try!(self.request(||
self.client.post(&format!("{}/users/@me/channels", API_BASE)).body(&body)));
PrivateChannel::decode(try!(serde_json::from_reader(response)))
}
/// Get the URL at which a user's avatar is located.
///
/// The generated URL always ends in `.jpg`.
pub fn get_user_avatar_url(&self, user: &UserId, avatar: &str) -> String {
    format!("{}/users/{}/avatars/{}.jpg", API_BASE, user.0, avatar)
}
/// Download a user's avatar.
pub fn get_user_avatar(&self, user: &UserId, avatar: &str) -> Result<Vec<u8>> {
    use std::io::Read;
    // Uses `retry` directly rather than `request`, so no Authorization
    // header is attached to the download.
    let url = self.get_user_avatar_url(user, avatar);
    let mut response = try!(retry(|| self.client.get(&url)));
    let mut bytes = Vec::new();
    try!(response.read_to_end(&mut bytes));
    Ok(bytes)
}
/// Edit the logged-in user's profile. See `EditProfile` for editable fields.
///
/// This method requires mutable access because editing the profile generates a new token.
pub fn edit_profile<F: FnOnce(EditProfile) -> EditProfile>(&mut self, f: F) -> Result<CurrentUser> {
    // First, get the current profile, so that providing username and avatar is optional.
    let response = try!(self.request(||
        self.client.get(&format!("{}/users/@me", API_BASE))));
    let user = try!(CurrentUser::decode(try!(serde_json::from_reader(response))));
    // Seed the patch with the current values; the caller's closure may
    // overwrite any of them.
    let mut map = ObjectBuilder::new()
        .insert("username", user.username)
        .insert("avatar", user.avatar);
    // Only include "email" when the account actually has one (bot accounts
    // may not).
    if let Some(email) = user.email.as_ref() {
        map = map.insert("email", email);
    }
    // Then, send the profile patch.
    let map = f(EditProfile(map)).0.build();
    let body = try!(serde_json::to_string(&map));
    let response = try!(self.request(||
        self.client.patch(&format!("{}/users/@me", API_BASE)).body(&body)));
    let mut json: BTreeMap<String, serde_json::Value> = try!(serde_json::from_reader(response));
    // If a token was included in the response, switch to it. Important because if the
    // password was changed, the old token is invalidated.
    if let Some(serde_json::Value::String(token)) = json.remove("token") {
        self.token = token;
    }
    // Decode the remaining fields (token already removed) as the new profile.
    CurrentUser::decode(serde_json::Value::Object(json))
}
/// Get the list of available voice regions for a server.
pub fn get_voice_regions(&self) -> Result<Vec<VoiceRegion>> {
let response = try!(self.request(|| self.client.get(&format!("{}/voice/regions", API_BASE))));
decode_array(try!(serde_json::from_reader(response)), VoiceRegion::decode)
}
/// Move a server member to another voice channel.
pub fn move_member_voice(&self, server: &ServerId, user: &UserId, channel: &ChannelId) -> Result<()> {
let map = ObjectBuilder::new()
.insert("channel_id", &channel.0)
.build();
let body = try!(serde_json::to_string(&map));
try!(self.request(||
self.client.patch(&format!("{}/guilds/{}/members/{}", API_BASE, server.0, user.0)).body(&body)));
Ok(())
}
/// Start a prune operation, kicking members who have been inactive for the
/// specified number of days. Members with a role assigned will never be
/// pruned.
pub fn begin_server_prune(&self, server: ServerId, days: u16) -> Result<ServerPrune> {
let map = ObjectBuilder::new()
.insert("days", days)
.build();
let body = try!(serde_json::to_string(&map));
let response = try!(self.request(|| self.client.post(
&format!("{}/guilds/{}/prune", API_BASE, server.0)).body(&body)));
ServerPrune::decode(try!(serde_json::from_reader(response)))
}
/// Get the number of members who have been inactive for the specified
/// number of days and would be pruned by a prune operation. Members with a
/// role assigned will never be pruned.
pub fn get_server_prune_count(&self, server: ServerId, days: u16) -> Result<ServerPrune> {
let map = ObjectBuilder::new()
.insert("days", days)
.build();
let body = try!(serde_json::to_string(&map));
let response = try!(self.request(|| self.client.get(
&format!("{}/guilds/{}/prune", API_BASE, server.0)).body(&body)));
ServerPrune::decode(try!(serde_json::from_reader(response)))
}
/// Sets a note for the user that is readable only to the currently logged
/// in user.
///
/// This endpoint is only available for users, and so does not work for
/// bots.
pub fn edit_note(&self, user: UserId, note: &str) -> Result<()> {
let map = ObjectBuilder::new()
.insert("note", note)
.build();
let body = try!(serde_json::to_string(&map));
try!(self.request(||
self.client.put(&format!("{}/users/@me/notes/{}", API_BASE, user.0)).body(&body)));
Ok(())
}
/// Retrieves information about the application and the owner.
pub fn get_application_info(&self) -> Result<ApplicationInfo> {
let response = try!(self.request(||
self.client.get(&format!("{}/oauth2/applications/@me", API_BASE))));
ApplicationInfo::decode(try!(serde_json::from_reader(response)))
}
/// Establish a websocket connection over which events can be received.
///
/// Also returns the `ReadyEvent` sent by Discord upon establishing the
/// connection, which contains the initial state as seen by the client.
///
/// See `connect_sharded` if you want to use guild sharding.
pub fn connect(&self) -> Result<(Connection, ReadyEvent)> {
    // No shard info: connect as a single, unsharded client.
    self.__connect(None)
}
/// Establish a sharded websocket connection over which events can be
/// received.
///
/// `shard_id` is zero-indexed, while `total_shards` is the total number of
/// shards and must be at least 1.
///
/// Also returns the `ReadyEvent` sent by Discord upon establishing the
/// connection, which contains the initial state as seen by the client.
///
/// See `connect` if you do not want to use guild sharding.
pub fn connect_sharded(&self, shard_id: u8, total_shards: u8) -> Result<(Connection, ReadyEvent)> {
    self.__connect(Some([shard_id, total_shards]))
}
fn __connect(&self, shard_info: Option<[u8; 2]>) -> Result<(Connection, ReadyEvent)> {
let response = try!(self.request(|| self.client.get(&format!("{}/gateway", API_BASE))));
let value: BTreeMap<String, String> = try!(serde_json::from_reader(response));
let url = match value.get("url") {
Some(url) => url,
None => return Err(Error::Protocol("Response missing \"url\" in Discord::connect()"))
};
Connection::new(&url, &self.token, shard_info)
}
}
/// Read an image from a file into a string suitable for upload.
///
/// If the file's extension is `.png` (in any letter case), the claimed media
/// type will be `image/png`, or `image/jpg` otherwise. Note that Discord may
/// convert the image to JPEG or another format after upload.
pub fn read_image<P: AsRef<::std::path::Path>>(path: P) -> Result<String> {
    use std::io::Read;
    let path = path.as_ref();
    let mut vec = Vec::new();
    try!(try!(std::fs::File::open(path)).read_to_end(&mut vec));
    // Compare case-insensitively so `.PNG` files are labeled correctly too;
    // the old exact comparison labeled them `image/jpg`.
    let is_png = path.extension()
        .and_then(|ext| ext.to_str())
        .map(|ext| ext.to_lowercase() == "png")
        .unwrap_or(false);
    Ok(format!("data:image/{};base64,{}",
        if is_png { "png" } else { "jpg" },
        base64::encode(&vec),
    ))
}
/// Retrieves the active maintenance statuses.
pub fn get_active_maintenances() -> Result<Vec<Maintenance>> {
let client = hyper::Client::new();
let response = try!(retry(|| client.get(
&format!("{}/api/v2/scheduled-maintenances/active.json", STATUS_BASE))));
let mut json: BTreeMap<String, serde_json::Value> = try!(serde_json::from_reader(response));
match json.remove("scheduled_maintenances") {
Some(scheduled_maintenances) => decode_array(scheduled_maintenances, Maintenance::decode),
None => Ok(vec![]),
}
}
/// Retrieves the upcoming maintenance statuses.
pub fn get_upcoming_maintenances() -> Result<Vec<Maintenance>> {
let client = hyper::Client::new();
let response = try!(retry(|| client.get(
&format!("{}/api/v2/scheduled-maintenances/upcoming.json", STATUS_BASE))));
let mut json: BTreeMap<String, serde_json::Value> = try!(serde_json::from_reader(response));
match json.remove("scheduled_maintenances") {
Some(scheduled_maintenances) => decode_array(scheduled_maintenances, Maintenance::decode),
None => Ok(vec![]),
}
}
/// Argument to `get_messages` to specify the desired message retrieval.
///
/// `N` below refers to the `limit` argument of `get_messages`.
pub enum GetMessages {
    /// Get the N most recent messages.
    MostRecent,
    /// Get the first N messages before the specified message.
    Before(MessageId),
    /// Get the first N messages after the specified message.
    After(MessageId),
    /// Get N/2 messages before, N/2 messages after, and the specified message.
    Around(MessageId),
}
/// Patch content for the `edit_server` call.
pub struct EditServer(ObjectBuilder);
impl EditServer {
    /// Edit the server's name.
    pub fn name(self, name: &str) -> Self {
        let EditServer(builder) = self;
        EditServer(builder.insert("name", name))
    }
    /// Edit the server's voice region.
    pub fn region(self, region: &str) -> Self {
        let EditServer(builder) = self;
        EditServer(builder.insert("region", region))
    }
    /// Edit the server's icon. Use `None` to remove the icon.
    pub fn icon(self, icon: Option<&str>) -> Self {
        match icon {
            Some(data) => EditServer(self.0.insert("icon", data)),
            None => EditServer(self.0.insert("icon", serde_json::Value::Null)),
        }
    }
    /// Edit the server's AFK channel. Use `None` to select no AFK channel.
    pub fn afk_channel(self, channel: Option<ChannelId>) -> Self {
        match channel {
            Some(id) => EditServer(self.0.insert("afk_channel_id", id.0)),
            None => EditServer(self.0.insert("afk_channel_id", serde_json::Value::Null)),
        }
    }
    /// Edit the server's AFK timeout.
    pub fn afk_timeout(self, timeout: u64) -> Self {
        let EditServer(builder) = self;
        EditServer(builder.insert("afk_timeout", timeout))
    }
    /// Transfer ownership of the server to a new owner.
    pub fn owner(self, owner: UserId) -> Self {
        let EditServer(builder) = self;
        EditServer(builder.insert("owner_id", owner.0))
    }
    /// Edit the verification level of the server.
    pub fn verification_level(self, verification_level: VerificationLevel) -> Self {
        let EditServer(builder) = self;
        EditServer(builder.insert("verification_level", verification_level.num()))
    }
    /// Edit the server's splash. Use `None` to remove the splash.
    pub fn splash(self, splash: Option<&str>) -> Self {
        match splash {
            Some(data) => EditServer(self.0.insert("splash", data)),
            None => EditServer(self.0.insert("splash", serde_json::Value::Null)),
        }
    }
}
/// Patch content for the `edit_channel` call.
pub struct EditChannel(ObjectBuilder);
impl EditChannel {
    /// Edit the channel's name.
    pub fn name(self, name: &str) -> Self {
        let EditChannel(builder) = self;
        EditChannel(builder.insert("name", name))
    }
    /// Edit the text channel's topic.
    pub fn topic(self, topic: &str) -> Self {
        let EditChannel(builder) = self;
        EditChannel(builder.insert("topic", topic))
    }
    /// Edit the channel's position in the list.
    pub fn position(self, position: u64) -> Self {
        let EditChannel(builder) = self;
        EditChannel(builder.insert("position", position))
    }
    /// Edit the voice channel's bitrate.
    pub fn bitrate(self, bitrate: u64) -> Self {
        let EditChannel(builder) = self;
        EditChannel(builder.insert("bitrate", bitrate))
    }
    /// Edit the voice channel's user limit. Both `None` and `Some(0)` mean "unlimited".
    pub fn user_limit(self, user_limit: u64) -> Self {
        let EditChannel(builder) = self;
        EditChannel(builder.insert("user_limit", user_limit))
    }
}
/// Patch content for the `edit_member` call.
pub struct EditMember(ObjectBuilder);
impl EditMember {
    /// Edit the member's nickname. Supply the empty string to remove a nickname.
    pub fn nickname(self, nick: &str) -> Self {
        let EditMember(builder) = self;
        EditMember(builder.insert("nick", nick))
    }
    /// Edit whether the member is server-muted.
    pub fn mute(self, mute: bool) -> Self {
        let EditMember(builder) = self;
        EditMember(builder.insert("mute", mute))
    }
    /// Edit whether the member is server-deafened.
    pub fn deaf(self, deafen: bool) -> Self {
        let EditMember(builder) = self;
        EditMember(builder.insert("deaf", deafen))
    }
    /// Edit the member's assigned roles.
    pub fn roles(self, roles: &[RoleId]) -> Self {
        let EditMember(builder) = self;
        EditMember(builder.insert_array("roles", |mut array| {
            for role in roles {
                array = array.push(role.0);
            }
            array
        }))
    }
    /// Move the member to another voice channel.
    pub fn channel(self, channel: ChannelId) -> Self {
        let EditMember(builder) = self;
        EditMember(builder.insert("channel_id", channel.0))
    }
}
/// Patch content for the `edit_profile` call.
pub struct EditProfile(ObjectBuilder);
impl EditProfile {
    /// Edit the user's username. Must be between 2 and 32 characters long.
    pub fn username(self, username: &str) -> Self {
        let EditProfile(builder) = self;
        EditProfile(builder.insert("username", username))
    }
    /// Edit the user's avatar. Use `None` to remove the avatar.
    pub fn avatar(self, icon: Option<&str>) -> Self {
        match icon {
            Some(data) => EditProfile(self.0.insert("avatar", data)),
            None => EditProfile(self.0.insert("avatar", serde_json::Value::Null)),
        }
    }
    /// Provide the user's current password for authentication. Does not apply to bot accounts, and
    /// must be supplied for user accounts.
    pub fn password(self, password: &str) -> Self {
        let EditProfile(builder) = self;
        EditProfile(builder.insert("password", password))
    }
    /// Edit the user's email address. Does not apply to bot accounts.
    pub fn email(self, email: &str) -> Self {
        let EditProfile(builder) = self;
        EditProfile(builder.insert("email", email))
    }
    /// Edit the user's password. Does not apply to bot accounts.
    pub fn new_password(self, password: &str) -> Self {
        let EditProfile(builder) = self;
        EditProfile(builder.insert("new_password", password))
    }
}
// Send a request (with the User-Agent header attached), retrying exactly
// once if the connection was aborted by the peer.
fn retry<'a, F: Fn() -> hyper::client::RequestBuilder<'a>>(f: F) -> Result<hyper::client::Response> {
    let f2 = || check_status(f()
        .header(hyper::header::UserAgent(USER_AGENT.to_owned()))
        .send());
    // retry on a ConnectionAborted, which occurs if it's been a while since the last request
    match f2() {
        Err(Error::Hyper(hyper::error::Error::Io(ref io)))
            if io.kind() == std::io::ErrorKind::ConnectionAborted => f2(),
        other => other
    }
}
fn check_status(response: hyper::Result<hyper::client::Response>) -> Result<hyper::client::Response> {
let response = try!(response);
if !response.status.is_success() {
return Err(Error::from_response(response))
}
Ok(response)
}
// Reduce an invite URL (or bare code) to just the invite code.
fn resolve_invite(invite: &str) -> &str {
    // Strip any known discord.gg prefix; otherwise assume the string is
    // already a bare code.
    for prefix in &["http://discord.gg/", "https://discord.gg/", "discord.gg/"] {
        if invite.starts_with(prefix) {
            return &invite[prefix.len()..];
        }
    }
    invite
}
// Block the current thread for the given number of milliseconds.
fn sleep_ms(millis: u64) {
    let duration = std::time::Duration::from_millis(millis);
    std::thread::sleep(duration)
}
// Timer that remembers when it is supposed to go off
struct Timer {
    // Absolute time at which the next tick is due.
    next_tick_at: time::Timespec,
    // Length of one tick interval.
    tick_len: time::Duration,
}
#[cfg_attr(not(feature="voice"), allow(dead_code))]
impl Timer {
    // Create a timer whose first tick is due one interval from now.
    fn new(tick_len_ms: u64) -> Timer {
        let tick_len = time::Duration::milliseconds(tick_len_ms as i64);
        Timer {
            next_tick_at: time::get_time() + tick_len,
            tick_len: tick_len,
        }
    }
    // Make the next check_tick() report a tick immediately.
    #[allow(dead_code)]
    fn immediately(&mut self) {
        self.next_tick_at = time::get_time();
    }
    // Push the next tick out to one full interval from now.
    fn defer(&mut self) {
        self.next_tick_at = time::get_time() + self.tick_len;
    }
    // Return true if the tick is due; the `&&` short-circuit advances the
    // deadline only in that case.
    fn check_tick(&mut self) -> bool {
        time::get_time() >= self.next_tick_at && {
            self.next_tick_at = self.next_tick_at + self.tick_len; true
        }
    }
    // Sleep until the next tick is due (if it is still in the future), then
    // schedule the following one.
    fn sleep_until_tick(&mut self) {
        let difference = self.next_tick_at - time::get_time();
        if difference > time::Duration::zero() {
            sleep_ms(difference.num_milliseconds() as u64)
        }
        self.next_tick_at = self.next_tick_at + self.tick_len;
    }
}
// Extension trait: receive one websocket message and decode it as JSON.
trait ReceiverExt {
    fn recv_json<F, T>(&mut self, decode: F) -> Result<T> where F: FnOnce(serde_json::Value) -> Result<T>;
}
// Extension trait: serialize a JSON value and send it over a websocket.
trait SenderExt {
    fn send_json(&mut self, value: &serde_json::Value) -> Result<()>;
}
impl ReceiverExt for websocket::client::Receiver<websocket::stream::WebSocketStream> {
    fn recv_json<F, T>(&mut self, decode: F) -> Result<T> where F: FnOnce(serde_json::Value) -> Result<T> {
        use websocket::message::{Message, Type};
        use websocket::ws::receiver::Receiver;
        let message: Message = try!(self.recv_message());
        if message.opcode == Type::Close {
            // Remote closed the socket: surface the close code and reason.
            Err(Error::Closed(message.cd_status_code, String::from_utf8_lossy(&message.payload).into_owned()))
        } else if message.opcode != Type::Text {
            // Any other non-text frame is unexpected here; report it as a
            // closure with no status code.
            Err(Error::Closed(None, String::from_utf8_lossy(&message.payload).into_owned()))
        } else {
            // Parse the text payload as JSON, then hand it to the caller's
            // decoder; log the raw payload if either step fails.
            serde_json::from_reader::<_, serde_json::Value>(&message.payload[..])
                .map_err(From::from)
                .and_then(decode)
                .map_err(|e| {
                    warn!("Error decoding: {}", String::from_utf8_lossy(&message.payload));
                    e
                })
        }
    }
}
impl SenderExt for websocket::client::Sender<websocket::stream::WebSocketStream> {
fn send_json(&mut self, value: &serde_json::Value) -> Result<()> {
use websocket::message::Message;
use websocket::ws::sender::Sender;
serde_json::to_string(value)
.map(Message::text)
.map_err(Error::from)
.and_then(|m| self.send_message(&m).map_err(Error::from))
}
}
mod internal {
    // Control messages passed around the connection internals.
    // NOTE(review): variant meanings inferred from names and payload types;
    // confirm against connection.rs.
    pub enum Status {
        // A JSON payload to be sent over the websocket.
        SendMessage(::serde_json::Value),
        // A newly observed event sequence number.
        Sequence(u64),
        // A change to the keepalive interval.
        ChangeInterval(u64),
        // A replacement websocket sender (e.g. after reconnecting).
        ChangeSender(::websocket::client::Sender<::websocket::stream::WebSocketStream>),
    }
}
Debug-log when a 204 No Content response was expected but a different status was received
//! Client library for the [Discord](https://discordapp.com) API.
//!
//! The Discord API can be divided into three main components: the RESTful API
//! to which calls can be made to take actions, a websocket-based permanent
//! connection over which state updates are received, and the voice calling
//! system.
//!
//! Log in to Discord with `Discord::new`, `new_cache`, or `from_bot_token` as appropriate.
//! The resulting value can be used to make REST API calls to post messages and manipulate Discord
//! state. Calling `connect()` will open a websocket connection, through which events can be
//! received. These two channels are enough to write a simple chatbot which can
//! read and respond to messages.
//!
//! For more in-depth tracking of Discord state, a `State` can be seeded with
//! the `ReadyEvent` obtained when opening a `Connection` and kept updated with
//! the events received over it.
//!
#![cfg_attr(not(feature="voice"), doc="*<b>NOTE</b>: The library has been compiled without voice support.*")]
//! To join voice servers, call `Connection::voice` to get a `VoiceConnection` and use `connect`
//! to join a channel, then `play` and `stop` to control playback. Manipulating deaf/mute state
//! and receiving audio are also possible.
//!
//! For examples, see the `examples` directory in the source tree.
#![warn(missing_docs)]
extern crate hyper;
extern crate serde_json;
extern crate websocket;
#[macro_use]
extern crate bitflags;
extern crate byteorder;
#[cfg(feature="voice")]
extern crate opus;
extern crate time;
#[macro_use]
extern crate log;
#[cfg(feature="voice")]
extern crate sodiumoxide;
extern crate multipart;
extern crate base64;
use std::collections::BTreeMap;
use serde_json::builder::ObjectBuilder;
mod error;
mod connection;
mod state;
#[cfg(feature="voice")]
pub mod voice;
pub mod model;
pub use error::{Result, Error};
pub use connection::Connection;
pub use state::{State, ChannelRef};
use model::*;
const USER_AGENT: &'static str = concat!("DiscordBot (https://github.com/SpaceManiac/discord-rs, ", env!("CARGO_PKG_VERSION"), ")");
const API_BASE: &'static str = "https://discordapp.com/api/v6";
const STATUS_BASE: &'static str = "https://status.discordapp.com";
/// Client for the Discord REST API.
///
/// Log in to the API with a user's email and password using `new()`. Call
/// `connect()` to create a `Connection` on which to receive events. If desired,
/// use `logout()` to invalidate the token when done. Other methods manipulate
/// the Discord REST API.
pub struct Discord {
    // HTTP client shared by all REST calls.
    client: hyper::Client,
    // Authorization token attached to each request; prefixed with "Bot " for
    // bot accounts (see `from_bot_token`).
    token: String,
}
impl Discord {
/// Log in to the Discord Rest API and acquire a token.
pub fn new(email: &str, password: &str) -> Result<Discord> {
let mut map = BTreeMap::new();
map.insert("email", email);
map.insert("password", password);
let client = hyper::Client::new();
let response = try!(check_status(client.post(&format!("{}/auth/login", API_BASE))
.header(hyper::header::ContentType::json())
.header(hyper::header::UserAgent(USER_AGENT.to_owned()))
.body(&try!(serde_json::to_string(&map)))
.send()));
let mut json: BTreeMap<String, String> = try!(serde_json::from_reader(response));
let token = match json.remove("token") {
Some(token) => token,
None => return Err(Error::Protocol("Response missing \"token\" in Discord::new()"))
};
Ok(Discord {
client: client,
token: token,
})
}
/// Log in to the Discord Rest API, possibly using a cached login token.
///
/// Cached login tokens are keyed to the email address and will be read from
/// and written to the specified path. If no cached token was found and no
/// password was specified, an error is returned.
pub fn new_cache<P: AsRef<std::path::Path>>(path: P, email: &str, password: Option<&str>) -> Result<Discord> {
    use std::io::{Write, BufRead, BufReader};
    use std::fs::File;
    // Read the cache, looking for our token
    // Cache format: one "email<TAB>token" pair per line.
    let path = path.as_ref();
    let mut initial_token: Option<String> = None;
    if let Ok(file) = File::open(path) {
        for line in BufReader::new(file).lines() {
            let line = try!(line);
            let parts: Vec<_> = line.split('\t').collect();
            if parts.len() == 2 && parts[0] == email {
                initial_token = Some(parts[1].trim().into());
                break;
            }
        }
    }
    // Perform the login
    let discord = if let Some(ref initial_token) = initial_token {
        // A cached token exists: attach it as Authorization so the password
        // is only needed if the server rejects the token.
        let mut map = BTreeMap::new();
        map.insert("email", email);
        if let Some(password) = password {
            map.insert("password", password);
        }
        let client = hyper::Client::new();
        let response = try!(check_status(client.post(&format!("{}/auth/login", API_BASE))
            .header(hyper::header::ContentType::json())
            .header(hyper::header::UserAgent(USER_AGENT.to_owned()))
            .header(hyper::header::Authorization(initial_token.clone()))
            .body(&try!(serde_json::to_string(&map)))
            .send()));
        let mut json: BTreeMap<String, String> = try!(serde_json::from_reader(response));
        let token = match json.remove("token") {
            Some(token) => token,
            None => return Err(Error::Protocol("Response missing \"token\" in Discord::new()"))
        };
        Discord {
            client: client,
            token: token,
        }
    } else {
        // No cached token: fall back to a plain password login.
        if let Some(password) = password {
            try!(Discord::new(email, password))
        } else {
            return Err(Error::Other("No password was specified and no cached token was found"))
        }
    };
    // Write the token back out, if needed
    // Keep every other account's cached token; replace only ours.
    if initial_token.as_ref() != Some(&discord.token) {
        let mut tokens = Vec::new();
        tokens.push(format!("{}\t{}", email, discord.token));
        if let Ok(file) = File::open(path) {
            for line in BufReader::new(file).lines() {
                let line = try!(line);
                if line.split('\t').next() != Some(email) {
                    tokens.push(line);
                }
            }
        }
        let mut file = try!(File::create(path));
        for line in tokens {
            try!(file.write_all(line.as_bytes()));
            try!(file.write_all(&[b'\n']));
        }
    }
    Ok(discord)
}
/// Log in as a bot account using the given authentication token.
pub fn from_bot_token(token: &str) -> Result<Discord> {
    // Bot tokens are sent with a "Bot " prefix in the Authorization header.
    let discord = Discord {
        client: hyper::Client::new(),
        token: format!("Bot {}", token),
    };
    Ok(discord)
}
/// Log out from the Discord API, invalidating this clients's token.
pub fn logout(self) -> Result<()> {
self.request(|| self.client.post(&format!("{}/auth/logout", API_BASE)))
.and_then(check_empty)
}
// Attach the JSON content type and this client's Authorization token to a
// request builder, then send it through `retry`.
fn request<'a, F: Fn() -> hyper::client::RequestBuilder<'a>>(&self, f: F) -> Result<hyper::client::Response> {
    let authorized = || f()
        .header(hyper::header::ContentType::json())
        .header(hyper::header::Authorization(self.token.clone()));
    retry(authorized)
}
/// Create a channel.
pub fn create_channel(&self, server: &ServerId, name: &str, kind: ChannelType) -> Result<Channel> {
let map = ObjectBuilder::new()
.insert("name", name)
.insert("type", kind.name())
.build();
let body = try!(serde_json::to_string(&map));
let response = try!(self.request(||
self.client.post(&format!("{}/guilds/{}/channels", API_BASE, server.0)).body(&body)));
Channel::decode(try!(serde_json::from_reader(response)))
}
/// Get information about a channel.
pub fn get_channel(&self, channel: ChannelId) -> Result<Channel> {
let response = try!(self.request(||
self.client.get(&format!("{}/channels/{}", API_BASE, channel.0))));
Channel::decode(try!(serde_json::from_reader(response)))
}
/// Edit a channel's details. See `EditChannel` for the editable fields.
///
/// ```ignore
/// // Edit a channel's name and topic
/// discord.edit_channel(channel_id, "general", |ch| ch
/// .topic("Welcome to the general chat!")
/// );
/// ```
pub fn edit_channel<F: FnOnce(EditChannel) -> EditChannel>(&self, channel: ChannelId, f: F) -> Result<PublicChannel> {
// Work around the fact that this supposed PATCH call actually requires all fields
let map = match try!(self.get_channel(channel)) {
Channel::Private(_) => return Err(Error::Other("Can not edit private channels")),
Channel::Public(channel) => {
let map = ObjectBuilder::new()
.insert("name", channel.name)
.insert("position", channel.position);
match channel.kind {
ChannelType::Text => map.insert("topic", channel.topic),
ChannelType::Voice => map.insert("bitrate", channel.bitrate).insert("user_limit", channel.user_limit),
_ => return Err(Error::Other(stringify!(format!("Unreachable channel type: {:?}", channel.kind)))),
}
},
Channel::Group(group) => ObjectBuilder::new().insert("name", group.name),
};
let map = f(EditChannel(map)).0.build();
let body = try!(serde_json::to_string(&map));
let response = try!(self.request(||
self.client.patch(&format!("{}/channels/{}", API_BASE, channel.0)).body(&body)));
PublicChannel::decode(try!(serde_json::from_reader(response)))
}
/// Delete a channel.
pub fn delete_channel(&self, channel: &ChannelId) -> Result<Channel> {
let response = try!(self.request(||
self.client.delete(&format!("{}/channels/{}", API_BASE, channel.0))));
Channel::decode(try!(serde_json::from_reader(response)))
}
/// Indicate typing on a channel for the next 5 seconds.
pub fn broadcast_typing(&self, channel: &ChannelId) -> Result<()> {
self.request(|| self.client.post(&format!("{}/channels/{}/typing", API_BASE, channel.0)))
.and_then(check_empty)
}
/// Get a single message by ID from a given channel.
pub fn get_message(&self, channel: ChannelId, message: MessageId) -> Result<Message> {
let response = try!(self.request(|| self.client.get(&format!("{}/channels/{}/messages/{}", API_BASE, channel.0, message.0))));
Message::decode(try!(serde_json::from_reader(response)))
}
/// Get messages in the backlog for a given channel.
///
/// The `what` argument should be one of the options in the `GetMessages`
/// enum, and will determine which messages will be returned. A message
/// limit can also be specified, and defaults to 50. More recent messages
/// will appear first in the list.
pub fn get_messages(&self, channel: ChannelId, what: GetMessages, limit: Option<u64>) -> Result<Vec<Message>> {
    use std::fmt::Write;
    let mut url = format!("{}/channels/{}/messages?limit={}", API_BASE, channel.0, limit.unwrap_or(50));
    // Translate the retrieval mode into an optional anchor query parameter.
    let anchor = match what {
        GetMessages::MostRecent => None,
        GetMessages::Before(id) => Some(("before", id)),
        GetMessages::After(id) => Some(("after", id)),
        GetMessages::Around(id) => Some(("around", id)),
    };
    if let Some((name, id)) = anchor {
        let _ = write!(url, "&{}={}", name, id.0);
    }
    let response = try!(self.request(|| self.client.get(&url)));
    decode_array(try!(serde_json::from_reader(response)), Message::decode)
}
/// Gets the pinned messages for a given channel.
pub fn get_pinned_messages(&self, channel: ChannelId) -> Result<Vec<Message>> {
let response = try!(self.request(|| self.client.get(
&format!("{}/channels/{}/pins", API_BASE, channel.0))));
let value = try!(serde_json::from_reader(response));
decode_array(value, Message::decode)
}
/// Pin the given message to the given channel.
///
/// Requires that the logged in account have the "MANAGE_MESSAGES" permission.
pub fn pin_message(&self, channel: ChannelId, message: MessageId) -> Result<()> {
self.request(|| self.client.put(
&format!("{}/channels/{}/pins/{}", API_BASE, channel.0, message.0)
)).and_then(check_empty)
}
/// Removes the given message from being pinned to the given channel.
///
/// Requires that the logged in account have the "MANAGE_MESSAGES" permission.
pub fn unpin_message(&self, channel: ChannelId, message: MessageId) -> Result<()> {
self.request(|| self.client.delete(
&format!("{}/channels/{}/pins/{}", API_BASE, channel.0, message.0)
)).and_then(check_empty)
}
/// Send a message to a given channel.
///
/// The `nonce` will be returned in the result and also transmitted to other
/// clients. The empty string is a good default if you don't care.
pub fn send_message(&self, channel: &ChannelId, text: &str, nonce: &str, tts: bool) -> Result<Message> {
let map = ObjectBuilder::new()
.insert("content", text)
.insert("nonce", nonce)
.insert("tts", tts)
.build();
let body = try!(serde_json::to_string(&map));
let response = try!(self.request(||
self.client.post(&format!("{}/channels/{}/messages", API_BASE, channel.0)).body(&body)));
Message::decode(try!(serde_json::from_reader(response)))
}
/// Edit a previously posted message.
///
/// Requires that either the message was posted by this user, or this user
/// has permission to manage other members' messages.
pub fn edit_message(&self, channel: &ChannelId, message: &MessageId, text: &str) -> Result<Message> {
let map = ObjectBuilder::new()
.insert("content", text)
.build();
let body = try!(serde_json::to_string(&map));
let response = try!(self.request(||
self.client.patch(&format!("{}/channels/{}/messages/{}", API_BASE, channel.0, message.0)).body(&body)));
Message::decode(try!(serde_json::from_reader(response)))
}
/// Delete a previously posted message.
///
/// Requires that either the message was posted by this user, or this user
/// has permission to manage other members' messages.
pub fn delete_message(&self, channel: &ChannelId, message: &MessageId) -> Result<()> {
self.request(|| self.client.delete(
&format!("{}/channels/{}/messages/{}", API_BASE, channel.0, message.0)
)).and_then(check_empty)
}
/// Bulk deletes a list of `MessageId`s from a given channel.
///
/// A minimum of 2 unique messages and a maximum of 100 unique messages may
/// be supplied, otherwise an `Error::Other` will be returned.
///
/// Each MessageId *should* be unique as duplicates will be removed from the
/// array before being sent to the Discord API.
///
/// Only bots can use this endpoint. Regular user accounts can not use this
/// endpoint under any circumstance.
///
/// Requires that either the message was posted by this user, or this user
/// has permission to manage other members' messages.
pub fn delete_messages(&self, channel: ChannelId, messages: &[MessageId]) -> Result<()> {
    // Collect the underlying u64 ids and remove duplicates before validation.
    // IDIOM FIX: `iter()` instead of `into_iter()` — on a slice reference the
    // latter yields references anyway and only obscures intent.
    let mut ids: Vec<u64> = messages.iter().map(|m| m.0).collect();
    ids.sort();
    ids.dedup();
    // Enforce the API's documented bounds on the *deduplicated* list.
    if ids.len() < 2 {
        return Err(Error::Other("A minimum of 2 message ids must be supplied"));
    } else if ids.len() > 100 {
        return Err(Error::Other("A maximum of 100 message ids may be supplied"));
    }
    let map = ObjectBuilder::new()
        .insert("messages", ids)
        .build();
    let body = try!(serde_json::to_string(&map));
    self.request(|| self.client.post(
        &format!("{}/channels/{}/messages/bulk_delete", API_BASE, channel.0)
    ).body(&body)).and_then(check_empty)
}
/// Send a file attached to a message on a given channel.
///
/// The `text` is allowed to be empty, but the filename must always be specified.
pub fn send_file<R: ::std::io::Read>(&self, channel: &ChannelId, text: &str, mut file: R, filename: &str) -> Result<Message> {
let url = match hyper::Url::parse(&format!("{}/channels/{}/messages", API_BASE, channel.0)) {
Ok(url) => url,
Err(_) => return Err(Error::Other("Invalid URL in send_file"))
};
// File uploads need a multipart body, so the request is built by hand here
// instead of going through `self.request` like the other endpoints.
// NOTE(review): this path therefore also skips the retry behavior the other
// calls get — confirm whether that is intentional.
let mut request = try!(hyper::client::Request::new(hyper::method::Method::Post, url));
request.headers_mut().set(hyper::header::Authorization(self.token.clone()));
request.headers_mut().set(hyper::header::UserAgent(USER_AGENT.to_owned()));
let mut request = try!(multipart::client::Multipart::from_request(request));
// "content" carries the message text; "file" streams the attachment bytes.
try!(request.write_text("content", text));
try!(request.write_stream("file", &mut file, Some(filename), None));
Message::decode(try!(serde_json::from_reader(try!(request.send()))))
}
/// Acknowledge this message as "read" by this client.
pub fn ack_message(&self, channel: &ChannelId, message: &MessageId) -> Result<()> {
self.request(|| self.client.post(
&format!("{}/channels/{}/messages/{}/ack", API_BASE, channel.0, message.0)
)).and_then(check_empty)
}
/// Create permissions for a `Channel` for a `Member` or `Role`.
///
/// # Examples
///
/// An example of creating channel role permissions for a `Member`:
///
/// ```ignore
/// use discord::model::{PermissionOverwriteType, permissions};
///
/// // Assuming that a `Discord` instance, member, and channel have already
/// // been defined previously.
/// let target = PermissionOverwrite {
///     kind: PermissionOverwriteType::Member(member.user.id),
///     allow: permissions::VOICE_CONNECT | permissions::VOICE_SPEAK,
///     deny: permissions::VOICE_MUTE_MEMBERS | permissions::VOICE_MOVE_MEMBERS,
/// };
/// let result = discord.create_permission(channel.id, target);
/// ```
///
/// The same can similarly be accomplished for a `Role`:
///
/// ```ignore
/// use discord::model::{PermissionOverwriteType, permissions};
///
/// // Assuming that a `Discord` instance, role, and channel have already
/// // been defined previously.
/// let target = PermissionOverwrite {
///     kind: PermissionOverwriteType::Role(role.id),
///     allow: permissions::VOICE_CONNECT | permissions::VOICE_SPEAK,
///     deny: permissions::VOICE_MUTE_MEMBERS | permissions::VOICE_MOVE_MEMBERS,
/// };
/// let result = discord.create_permission(channel.id, target);
/// ```
pub fn create_permission(&self, channel: ChannelId, target: PermissionOverwrite) -> Result<()> {
    // Determine the raw id and the API's "type" discriminator string.
    let (id, kind) = match target.kind {
        PermissionOverwriteType::Member(user_id) => (user_id.0, "member"),
        PermissionOverwriteType::Role(role_id) => (role_id.0, "role"),
    };
    let payload = ObjectBuilder::new()
        .insert("id", id)
        .insert("allow", target.allow.bits())
        .insert("deny", target.deny.bits())
        .insert("type", kind)
        .build();
    let body = try!(serde_json::to_string(&payload));
    let url = format!("{}/channels/{}/permissions/{}", API_BASE, channel.0, id);
    self.request(|| self.client.put(&url).body(&body)).and_then(check_empty)
}
/// Delete a `Member` or `Role`'s permissions for a `Channel`.
///
/// # Examples
///
/// Delete a `Member`'s permissions for a `Channel`:
///
/// ```ignore
/// use discord::model::PermissionOverwriteType;
///
/// // Assuming that a `Discord` instance, channel, and member have already
/// // been previously defined.
/// let target = PermissionOverwriteType::Member(member.user.id);
/// let response = discord.delete_permission(channel.id, target);
/// ```
///
/// The same can be accomplished for a `Role` similarly:
///
/// ```ignore
/// use discord::model::PermissionOverwriteType;
///
/// // Assuming that a `Discord` instance, channel, and role have already
/// // been previously defined.
/// let target = PermissionOverwriteType::Role(role.id);
/// let response = discord.delete_permission(channel.id, target);
/// ```
pub fn delete_permission(&self, channel: ChannelId, permission_type: PermissionOverwriteType) -> Result<()> {
let id = match permission_type {
PermissionOverwriteType::Member(id) => id.0,
PermissionOverwriteType::Role(id) => id.0,
};
self.request(|| self.client.delete(
&format!("{}/channels/{}/permissions/{}", API_BASE, channel.0, id)
)).and_then(check_empty)
}
/// Get the list of servers this user knows about.
pub fn get_servers(&self) -> Result<Vec<ServerInfo>> {
let response = try!(self.request(||
self.client.get(&format!("{}/users/@me/guilds", API_BASE))));
decode_array(try!(serde_json::from_reader(response)), ServerInfo::decode)
}
/// Create a new server with the given name.
pub fn create_server(&self, name: &str, region: &str, icon: Option<&str>) -> Result<Server> {
let map = ObjectBuilder::new()
.insert("name", name)
.insert("region", region)
.insert("icon", icon)
.build();
let body = try!(serde_json::to_string(&map));
let response = try!(self.request(||
self.client.post(&format!("{}/guilds", API_BASE)).body(&body)));
Server::decode(try!(serde_json::from_reader(response)))
}
/// Edit a server's information. See `EditServer` for the editable fields.
///
/// ```ignore
/// // Rename a server
/// discord.edit_server(server_id, |server| server.name("My Cool Server"));
/// // Edit many properties at once
/// discord.edit_server(server_id, |server| server
/// .name("My Cool Server")
/// .icon(Some("data:image/jpg;base64,..."))
/// .afk_timeout(300)
/// .region("us-south")
/// );
/// ```
pub fn edit_server<F: FnOnce(EditServer) -> EditServer>(&self, server_id: ServerId, f: F) -> Result<Server> {
let map = f(EditServer(ObjectBuilder::new())).0.build();
let body = try!(serde_json::to_string(&map));
let response = try!(self.request(||
self.client.patch(&format!("{}/guilds/{}", API_BASE, server_id.0)).body(&body)));
Server::decode(try!(serde_json::from_reader(response)))
}
/// Leave the given server.
pub fn leave_server(&self, server: &ServerId) -> Result<Server> {
let response = try!(self.request(||
self.client.delete(&format!("{}/users/@me/guilds/{}", API_BASE, server.0))));
Server::decode(try!(serde_json::from_reader(response)))
}
/// Delete the given server. Only available to the server owner.
pub fn delete_server(&self, server: &ServerId) -> Result<Server> {
let response = try!(self.request(||
self.client.delete(&format!("{}/guilds/{}", API_BASE, server.0))));
Server::decode(try!(serde_json::from_reader(response)))
}
/// Get the ban list for the given server.
pub fn get_bans(&self, server: &ServerId) -> Result<Vec<User>> {
let response = try!(self.request(||
self.client.get(&format!("{}/guilds/{}/bans", API_BASE, server.0))));
decode_array(try!(serde_json::from_reader(response)), User::decode_ban)
}
/// Ban a user from the server, optionally deleting their recent messages.
///
/// Zero may be passed for `delete_message_days` if no deletion is desired.
pub fn add_ban(&self, server: &ServerId, user: &UserId, delete_message_days: u32) -> Result<()> {
self.request(|| self.client.put(
&format!("{}/guilds/{}/bans/{}?delete_message_days={}", API_BASE, server.0, user.0, delete_message_days)
)).and_then(check_empty)
}
/// Unban a user from the server.
pub fn remove_ban(&self, server: &ServerId, user: &UserId) -> Result<()> {
self.request(|| self.client.delete(
&format!("{}/guilds/{}/bans/{}", API_BASE, server.0, user.0)
)).and_then(check_empty)
}
/// Extract information from an invite.
///
/// The invite should either be a URL of the form `http://discord.gg/CODE`,
/// or a string containing just the `CODE`.
pub fn get_invite(&self, invite: &str) -> Result<Invite> {
let invite = resolve_invite(invite);
let response = try!(self.request(||
self.client.get(&format!("{}/invite/{}", API_BASE, invite))));
Invite::decode(try!(serde_json::from_reader(response)))
}
/// Get the active invites for a server.
pub fn get_server_invites(&self, server: ServerId) -> Result<Vec<RichInvite>> {
let response = try!(self.request(||
self.client.get(&format!("{}/guilds/{}/invites", API_BASE, server.0))));
decode_array(try!(serde_json::from_reader(response)), RichInvite::decode)
}
/// Get the active invites for a channel.
pub fn get_channel_invites(&self, channel: ChannelId) -> Result<Vec<RichInvite>> {
let response = try!(self.request(||
self.client.get(&format!("{}/channels/{}/invites", API_BASE, channel.0))));
decode_array(try!(serde_json::from_reader(response)), RichInvite::decode)
}
/// Accept an invite. See `get_invite` for details.
pub fn accept_invite(&self, invite: &str) -> Result<Invite> {
let invite = resolve_invite(invite);
let response = try!(self.request(||
self.client.post(&format!("{}/invite/{}", API_BASE, invite))));
Invite::decode(try!(serde_json::from_reader(response)))
}
/// Create an invite to a channel.
///
/// Passing 0 for `max_age` or `max_uses` means no limit. `max_age` should
/// be specified in seconds.
pub fn create_invite(&self, channel: ChannelId,
max_age: u64, max_uses: u64,
temporary: bool
) -> Result<RichInvite> {
let map = ObjectBuilder::new()
.insert("validate", serde_json::Value::Null)
.insert("max_age", max_age)
.insert("max_uses", max_uses)
.insert("temporary", temporary)
.build();
let body = try!(serde_json::to_string(&map));
let response = try!(self.request(||
self.client.post(&format!("{}/channels/{}/invites", API_BASE, channel.0)).body(&body)));
RichInvite::decode(try!(serde_json::from_reader(response)))
}
/// Delete an invite. See `get_invite` for details.
pub fn delete_invite(&self, invite: &str) -> Result<Invite> {
let invite = resolve_invite(invite);
let response = try!(self.request(||
self.client.delete(&format!("{}/invite/{}", API_BASE, invite))));
Invite::decode(try!(serde_json::from_reader(response)))
}
/// Retrieve a member object for a server given the member's user id.
pub fn get_member(&self, server: ServerId, user: UserId) -> Result<Member> {
let response = try!(self.request(|| self.client.get(
&format!("{}/guilds/{}/members/{}", API_BASE, server.0, user.0))));
Member::decode(try!(serde_json::from_reader(response)))
}
/// Edit the list of roles assigned to a member of a server.
pub fn edit_member_roles(&self, server: &ServerId, user: &UserId, roles: &[RoleId]) -> Result<()> {
// Convenience wrapper: delegates to `edit_member`, replacing the member's
// entire role list with `roles`.
self.edit_member(*server, *user, |m| m.roles(roles))
}
/// Edit member information, including roles, nickname, and voice state.
///
/// See the `EditMember` struct for the editable fields.
pub fn edit_member<F: FnOnce(EditMember) -> EditMember>(&self, server: ServerId, user: UserId, f: F) -> Result<()> {
    // Apply the caller's edits to an empty patch object.
    let patch = f(EditMember(ObjectBuilder::new())).0.build();
    let body = try!(serde_json::to_string(&patch));
    let url = format!("{}/guilds/{}/members/{}", API_BASE, server.0, user.0);
    self.request(|| self.client.patch(&url).body(&body)).and_then(check_empty)
}
/// Kick a member from a server.
pub fn kick_member(&self, server: &ServerId, user: &UserId) -> Result<()> {
self.request(|| self.client.delete(
&format!("{}/guilds/{}/members/{}", API_BASE, server.0, user.0)
)).and_then(check_empty)
}
// Create role
// Edit role
// Reorder roles
// Delete roles
/// Create a private channel with the given user, or return the existing
/// one if it exists.
pub fn create_private_channel(&self, recipient: &UserId) -> Result<PrivateChannel> {
let map = ObjectBuilder::new()
.insert("recipient_id", &recipient.0)
.build();
let body = try!(serde_json::to_string(&map));
let response = try!(self.request(||
self.client.post(&format!("{}/users/@me/channels", API_BASE)).body(&body)));
PrivateChannel::decode(try!(serde_json::from_reader(response)))
}
/// Get the URL at which a user's avatar is located.
///
/// This performs no request; it only formats the URL from the user id and
/// avatar hash.
pub fn get_user_avatar_url(&self, user: &UserId, avatar: &str) -> String {
format!("{}/users/{}/avatars/{}.jpg", API_BASE, user.0, avatar)
}
/// Download a user's avatar.
pub fn get_user_avatar(&self, user: &UserId, avatar: &str) -> Result<Vec<u8>> {
    use std::io::Read;
    // Avatar downloads don't need authentication, so plain `retry` is used
    // instead of `self.request`.
    let url = self.get_user_avatar_url(user, avatar);
    let mut response = try!(retry(|| self.client.get(&url)));
    let mut bytes = Vec::new();
    try!(response.read_to_end(&mut bytes));
    Ok(bytes)
}
/// Edit the logged-in user's profile. See `EditProfile` for editable fields.
///
/// This method requires mutable access because editing the profile generates a new token.
pub fn edit_profile<F: FnOnce(EditProfile) -> EditProfile>(&mut self, f: F) -> Result<CurrentUser> {
// First, get the current profile, so that providing username and avatar is optional.
let response = try!(self.request(||
self.client.get(&format!("{}/users/@me", API_BASE))));
let user = try!(CurrentUser::decode(try!(serde_json::from_reader(response))));
let mut map = ObjectBuilder::new()
.insert("username", user.username)
.insert("avatar", user.avatar);
// Only seed "email" when the account has one (bot accounts may not).
if let Some(email) = user.email.as_ref() {
map = map.insert("email", email);
}
// Then, send the profile patch.
let map = f(EditProfile(map)).0.build();
let body = try!(serde_json::to_string(&map));
let response = try!(self.request(||
self.client.patch(&format!("{}/users/@me", API_BASE)).body(&body)));
let mut json: BTreeMap<String, serde_json::Value> = try!(serde_json::from_reader(response));
// If a token was included in the response, switch to it. Important because if the
// password was changed, the old token is invalidated.
if let Some(serde_json::Value::String(token)) = json.remove("token") {
self.token = token;
}
// Decode the remaining fields (with "token" already removed) as the user.
CurrentUser::decode(serde_json::Value::Object(json))
}
/// Get the list of available voice regions for a server.
pub fn get_voice_regions(&self) -> Result<Vec<VoiceRegion>> {
let response = try!(self.request(|| self.client.get(&format!("{}/voice/regions", API_BASE))));
decode_array(try!(serde_json::from_reader(response)), VoiceRegion::decode)
}
/// Move a server member to another voice channel.
pub fn move_member_voice(&self, server: &ServerId, user: &UserId, channel: &ChannelId) -> Result<()> {
    let payload = ObjectBuilder::new()
        .insert("channel_id", &channel.0)
        .build();
    let body = try!(serde_json::to_string(&payload));
    let url = format!("{}/guilds/{}/members/{}", API_BASE, server.0, user.0);
    self.request(|| self.client.patch(&url).body(&body)).and_then(check_empty)
}
/// Start a prune operation, kicking members who have been inactive for the
/// specified number of days. Members with a role assigned will never be
/// pruned.
pub fn begin_server_prune(&self, server: ServerId, days: u16) -> Result<ServerPrune> {
let map = ObjectBuilder::new()
.insert("days", days)
.build();
let body = try!(serde_json::to_string(&map));
let response = try!(self.request(|| self.client.post(
&format!("{}/guilds/{}/prune", API_BASE, server.0)).body(&body)));
ServerPrune::decode(try!(serde_json::from_reader(response)))
}
/// Get the number of members who have been inactive for the specified
/// number of days and would be pruned by a prune operation. Members with a
/// role assigned will never be pruned.
pub fn get_server_prune_count(&self, server: ServerId, days: u16) -> Result<ServerPrune> {
    // BUG FIX: this endpoint is a GET and takes `days` as a query parameter.
    // The previous code serialized `days` into a request body, which servers
    // ignore on GET requests, so the argument never reached the API and the
    // server-side default was always used.
    let response = try!(self.request(|| self.client.get(
        &format!("{}/guilds/{}/prune?days={}", API_BASE, server.0, days))));
    ServerPrune::decode(try!(serde_json::from_reader(response)))
}
/// Sets a note for the user that is readable only to the currently logged
/// in user.
///
/// This endpoint is only available for users, and so does not work for
/// bots.
pub fn edit_note(&self, user: UserId, note: &str) -> Result<()> {
    let payload = ObjectBuilder::new()
        .insert("note", note)
        .build();
    let body = try!(serde_json::to_string(&payload));
    let url = format!("{}/users/@me/notes/{}", API_BASE, user.0);
    self.request(|| self.client.put(&url).body(&body)).and_then(check_empty)
}
/// Retrieves information about the application and the owner.
pub fn get_application_info(&self) -> Result<ApplicationInfo> {
let response = try!(self.request(||
self.client.get(&format!("{}/oauth2/applications/@me", API_BASE))));
ApplicationInfo::decode(try!(serde_json::from_reader(response)))
}
/// Establish a websocket connection over which events can be received.
///
/// Also returns the `ReadyEvent` sent by Discord upon establishing the
/// connection, which contains the initial state as seen by the client.
///
/// See `connect_sharded` if you want to use guild sharding.
pub fn connect(&self) -> Result<(Connection, ReadyEvent)> {
// `None` means no shard info is sent in the identify payload.
self.__connect(None)
}
/// Establish a sharded websocket connection over which events can be
/// received.
///
/// The `shard_id` is indexed at 0 while `total_shards` is indexed at 1.
///
/// Also returns the `ReadyEvent` sent by Discord upon establishing the
/// connection, which contains the initial state as seen by the client.
///
/// See `connect` if you do not want to use guild sharding.
pub fn connect_sharded(&self, shard_id: u8, total_shards: u8) -> Result<(Connection, ReadyEvent)> {
// Shard info is passed as the pair [shard_id, total_shards].
self.__connect(Some([shard_id, total_shards]))
}
// Shared implementation of `connect` and `connect_sharded`: look up the
// gateway URL, then open the websocket connection.
fn __connect(&self, shard_info: Option<[u8; 2]>) -> Result<(Connection, ReadyEvent)> {
// Ask the API which gateway endpoint to connect to.
let response = try!(self.request(|| self.client.get(&format!("{}/gateway", API_BASE))));
let value: BTreeMap<String, String> = try!(serde_json::from_reader(response));
let url = match value.get("url") {
Some(url) => url,
None => return Err(Error::Protocol("Response missing \"url\" in Discord::connect()"))
};
Connection::new(&url, &self.token, shard_info)
}
}
/// Read an image from a file into a string suitable for upload.
///
/// If the file's extension is `.png` (in any letter case), the claimed media type
/// will be `image/png`, or `image/jpg` otherwise. Note that Discord may convert the
/// image to JPEG or another format after upload.
pub fn read_image<P: AsRef<::std::path::Path>>(path: P) -> Result<String> {
    use std::io::Read;
    let path = path.as_ref();
    let mut vec = Vec::new();
    try!(try!(std::fs::File::open(path)).read_to_end(&mut vec));
    // BUG FIX: compare the extension case-insensitively so files named `.PNG`
    // (or `.Png`) are labeled `image/png` instead of falling through to jpg.
    let is_png = path.extension()
        .and_then(|ext| ext.to_str())
        .map_or(false, |ext| ext.eq_ignore_ascii_case("png"));
    Ok(format!("data:image/{};base64,{}",
        if is_png { "png" } else { "jpg" },
        base64::encode(&vec),
    ))
}
/// Retrieves the active maintenance statuses.
pub fn get_active_maintenances() -> Result<Vec<Maintenance>> {
let client = hyper::Client::new();
let response = try!(retry(|| client.get(
&format!("{}/api/v2/scheduled-maintenances/active.json", STATUS_BASE))));
let mut json: BTreeMap<String, serde_json::Value> = try!(serde_json::from_reader(response));
match json.remove("scheduled_maintenances") {
Some(scheduled_maintenances) => decode_array(scheduled_maintenances, Maintenance::decode),
None => Ok(vec![]),
}
}
/// Retrieves the upcoming maintenance statuses.
pub fn get_upcoming_maintenances() -> Result<Vec<Maintenance>> {
let client = hyper::Client::new();
let response = try!(retry(|| client.get(
&format!("{}/api/v2/scheduled-maintenances/upcoming.json", STATUS_BASE))));
let mut json: BTreeMap<String, serde_json::Value> = try!(serde_json::from_reader(response));
match json.remove("scheduled_maintenances") {
Some(scheduled_maintenances) => decode_array(scheduled_maintenances, Maintenance::decode),
None => Ok(vec![]),
}
}
/// Argument to `get_messages` to specify the desired message retrieval.
///
/// Here N is the `limit` argument passed to `get_messages` (default 50).
pub enum GetMessages {
/// Get the N most recent messages.
MostRecent,
/// Get the first N messages before the specified message.
Before(MessageId),
/// Get the first N messages after the specified message.
After(MessageId),
/// Get N/2 messages before, N/2 messages after, and the specified message.
Around(MessageId),
}
/// Patch content for the `edit_server` call.
pub struct EditServer(ObjectBuilder);
impl EditServer {
    /// Edit the server's name.
    pub fn name(self, name: &str) -> Self {
        let builder = self.0.insert("name", name);
        EditServer(builder)
    }
    /// Edit the server's voice region.
    pub fn region(self, region: &str) -> Self {
        let builder = self.0.insert("region", region);
        EditServer(builder)
    }
    /// Edit the server's icon. Use `None` to remove the icon.
    pub fn icon(self, icon: Option<&str>) -> Self {
        match icon {
            Some(data) => EditServer(self.0.insert("icon", data)),
            None => EditServer(self.0.insert("icon", serde_json::Value::Null)),
        }
    }
    /// Edit the server's AFK channel. Use `None` to select no AFK channel.
    pub fn afk_channel(self, channel: Option<ChannelId>) -> Self {
        match channel {
            Some(ch) => EditServer(self.0.insert("afk_channel_id", ch.0)),
            None => EditServer(self.0.insert("afk_channel_id", serde_json::Value::Null)),
        }
    }
    /// Edit the server's AFK timeout.
    pub fn afk_timeout(self, timeout: u64) -> Self {
        let builder = self.0.insert("afk_timeout", timeout);
        EditServer(builder)
    }
    /// Transfer ownership of the server to a new owner.
    pub fn owner(self, owner: UserId) -> Self {
        let builder = self.0.insert("owner_id", owner.0);
        EditServer(builder)
    }
    /// Edit the verification level of the server.
    pub fn verification_level(self, verification_level: VerificationLevel) -> Self {
        let builder = self.0.insert("verification_level", verification_level.num());
        EditServer(builder)
    }
    /// Edit the server's splash. Use `None` to remove the splash.
    pub fn splash(self, splash: Option<&str>) -> Self {
        match splash {
            Some(data) => EditServer(self.0.insert("splash", data)),
            None => EditServer(self.0.insert("splash", serde_json::Value::Null)),
        }
    }
}
/// Patch content for the `edit_channel` call.
pub struct EditChannel(ObjectBuilder);
impl EditChannel {
    /// Edit the channel's name.
    pub fn name(self, name: &str) -> Self {
        let builder = self.0.insert("name", name);
        EditChannel(builder)
    }
    /// Edit the text channel's topic.
    pub fn topic(self, topic: &str) -> Self {
        let builder = self.0.insert("topic", topic);
        EditChannel(builder)
    }
    /// Edit the channel's position in the list.
    pub fn position(self, position: u64) -> Self {
        let builder = self.0.insert("position", position);
        EditChannel(builder)
    }
    /// Edit the voice channel's bitrate.
    pub fn bitrate(self, bitrate: u64) -> Self {
        let builder = self.0.insert("bitrate", bitrate);
        EditChannel(builder)
    }
    /// Edit the voice channel's user limit. Both `None` and `Some(0)` mean "unlimited".
    pub fn user_limit(self, user_limit: u64) -> Self {
        let builder = self.0.insert("user_limit", user_limit);
        EditChannel(builder)
    }
}
/// Patch content for the `edit_member` call.
pub struct EditMember(ObjectBuilder);
impl EditMember {
    /// Edit the member's nickname. Supply the empty string to remove a nickname.
    pub fn nickname(self, nick: &str) -> Self {
        let builder = self.0.insert("nick", nick);
        EditMember(builder)
    }
    /// Edit whether the member is server-muted.
    pub fn mute(self, mute: bool) -> Self {
        let builder = self.0.insert("mute", mute);
        EditMember(builder)
    }
    /// Edit whether the member is server-deafened.
    pub fn deaf(self, deafen: bool) -> Self {
        let builder = self.0.insert("deaf", deafen);
        EditMember(builder)
    }
    /// Edit the member's assigned roles.
    pub fn roles(self, roles: &[RoleId]) -> Self {
        let builder = self.0.insert_array("roles",
            |array| roles.iter().fold(array, |array, role| array.push(role.0)));
        EditMember(builder)
    }
    /// Move the member to another voice channel.
    pub fn channel(self, channel: ChannelId) -> Self {
        let builder = self.0.insert("channel_id", channel.0);
        EditMember(builder)
    }
}
/// Patch content for the `edit_profile` call.
pub struct EditProfile(ObjectBuilder);
impl EditProfile {
    /// Edit the user's username. Must be between 2 and 32 characters long.
    pub fn username(self, username: &str) -> Self {
        let builder = self.0.insert("username", username);
        EditProfile(builder)
    }
    /// Edit the user's avatar. Use `None` to remove the avatar.
    pub fn avatar(self, icon: Option<&str>) -> Self {
        match icon {
            Some(data) => EditProfile(self.0.insert("avatar", data)),
            None => EditProfile(self.0.insert("avatar", serde_json::Value::Null)),
        }
    }
    /// Provide the user's current password for authentication. Does not apply to bot accounts, and
    /// must be supplied for user accounts.
    pub fn password(self, password: &str) -> Self {
        let builder = self.0.insert("password", password);
        EditProfile(builder)
    }
    /// Edit the user's email address. Does not apply to bot accounts.
    pub fn email(self, email: &str) -> Self {
        let builder = self.0.insert("email", email);
        EditProfile(builder)
    }
    /// Edit the user's password. Does not apply to bot accounts.
    pub fn new_password(self, password: &str) -> Self {
        let builder = self.0.insert("new_password", password);
        EditProfile(builder)
    }
}
// Send a request with the standard User-Agent, retrying once if the
// connection was aborted (which occurs if it's been a while since the
// last request on a keep-alive connection).
fn retry<'a, F: Fn() -> hyper::client::RequestBuilder<'a>>(f: F) -> Result<hyper::client::Response> {
    let attempt = || {
        let builder = f().header(hyper::header::UserAgent(USER_AGENT.to_owned()));
        check_status(builder.send())
    };
    match attempt() {
        Err(Error::Hyper(hyper::error::Error::Io(ref io)))
            if io.kind() == std::io::ErrorKind::ConnectionAborted => attempt(),
        other => other,
    }
}
#[inline]
fn check_status(response: hyper::Result<hyper::client::Response>) -> Result<hyper::client::Response> {
let response = try!(response);
if !response.status.is_success() {
return Err(Error::from_response(response))
}
Ok(response)
}
// Verify that a response is the expected "204 No Content".
//
// NOTE: a non-204 status is only *logged* at debug level (headers and body
// included) and still returns Ok(()) — this is deliberately best-effort.
// The only Err path is a failure to read the body while logging.
#[inline]
fn check_empty(mut response: hyper::client::Response) -> Result<()> {
if response.status != hyper::status::StatusCode::NoContent {
use std::io::Read;
debug!("Expected 204 No Content, got {}", response.status);
for header in response.headers.iter() {
debug!("Header: {}", header);
}
let mut content = String::new();
try!(response.read_to_string(&mut content));
debug!("Content: {}", content);
}
Ok(())
}
// Strip a known invite-URL prefix, if present, leaving just the invite code.
fn resolve_invite(invite: &str) -> &str {
    // Checked in order; a bare code (or unknown URL) is returned unchanged.
    let prefixes = ["http://discord.gg/", "https://discord.gg/", "discord.gg/"];
    for prefix in &prefixes {
        if invite.starts_with(prefix) {
            return &invite[prefix.len()..];
        }
    }
    invite
}
// Block the current thread for the given number of milliseconds.
fn sleep_ms(millis: u64) {
    let duration = std::time::Duration::from_millis(millis);
    std::thread::sleep(duration)
}
// Timer that remembers when it is supposed to go off
struct Timer {
// Absolute time of the next tick.
next_tick_at: time::Timespec,
// Interval between consecutive ticks.
tick_len: time::Duration,
}
#[cfg_attr(not(feature="voice"), allow(dead_code))]
impl Timer {
    /// Create a timer whose first tick is one interval from now.
    fn new(tick_len_ms: u64) -> Timer {
        let tick_len = time::Duration::milliseconds(tick_len_ms as i64);
        let first_tick = time::get_time() + tick_len;
        Timer {
            next_tick_at: first_tick,
            tick_len: tick_len,
        }
    }
    /// Force the next tick to fire immediately.
    #[allow(dead_code)]
    fn immediately(&mut self) {
        self.next_tick_at = time::get_time();
    }
    /// Push the next tick a full interval into the future.
    fn defer(&mut self) {
        self.next_tick_at = time::get_time() + self.tick_len;
    }
    /// Return true (and schedule the following tick) if the tick time has passed.
    fn check_tick(&mut self) -> bool {
        if time::get_time() >= self.next_tick_at {
            self.next_tick_at = self.next_tick_at + self.tick_len;
            true
        } else {
            false
        }
    }
    /// Sleep until the next tick time, then schedule the following tick.
    fn sleep_until_tick(&mut self) {
        let remaining = self.next_tick_at - time::get_time();
        if remaining > time::Duration::zero() {
            sleep_ms(remaining.num_milliseconds() as u64)
        }
        self.next_tick_at = self.next_tick_at + self.tick_len;
    }
}
// Extension trait: receive one websocket message and decode its JSON payload.
trait ReceiverExt {
fn recv_json<F, T>(&mut self, decode: F) -> Result<T> where F: FnOnce(serde_json::Value) -> Result<T>;
}
// Extension trait: serialize a JSON value and send it as a websocket text message.
trait SenderExt {
fn send_json(&mut self, value: &serde_json::Value) -> Result<()>;
}
impl ReceiverExt for websocket::client::Receiver<websocket::stream::WebSocketStream> {
fn recv_json<F, T>(&mut self, decode: F) -> Result<T> where F: FnOnce(serde_json::Value) -> Result<T> {
use websocket::message::{Message, Type};
use websocket::ws::receiver::Receiver;
let message: Message = try!(self.recv_message());
// A Close frame carries an optional status code plus a reason payload.
if message.opcode == Type::Close {
Err(Error::Closed(message.cd_status_code, String::from_utf8_lossy(&message.payload).into_owned()))
// Anything other than a Text frame is unexpected and treated as a close
// without a status code.
} else if message.opcode != Type::Text {
Err(Error::Closed(None, String::from_utf8_lossy(&message.payload).into_owned()))
} else {
// Parse the payload as JSON, run the caller's decoder, and log the raw
// payload if either step fails to aid debugging.
serde_json::from_reader::<_, serde_json::Value>(&message.payload[..])
.map_err(From::from)
.and_then(decode)
.map_err(|e| {
warn!("Error decoding: {}", String::from_utf8_lossy(&message.payload));
e
})
}
}
}
impl SenderExt for websocket::client::Sender<websocket::stream::WebSocketStream> {
fn send_json(&mut self, value: &serde_json::Value) -> Result<()> {
use websocket::message::Message;
use websocket::ws::sender::Sender;
serde_json::to_string(value)
.map(Message::text)
.map_err(Error::from)
.and_then(|m| self.send_message(&m).map_err(Error::from))
}
}
mod internal {
    /// Control messages handed to the connection's background task.
    pub enum Status {
        /// Send the given JSON payload over the websocket.
        SendMessage(::serde_json::Value),
        /// Record a newly observed sequence number.
        Sequence(u64),
        /// Change the periodic send interval — presumably milliseconds, like
        /// `Timer::new`; confirm at the point of use.
        ChangeInterval(u64),
        /// Swap in a replacement websocket sender (e.g. after a reconnect).
        ChangeSender(::websocket::client::Sender<::websocket::stream::WebSocketStream>),
    }
}
|
// Copyright 2015 Andre Bogus
// Licensed under the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>. This file may not be copied, modified,
// or distributed except according to those terms.
//! Space-efficient optional values
//!
//! Type `OptionBool` represents an optional boolean value, similar to
//! `Option<bool>`. Most function implementations are similar or equal.
//! Note that the `map_bool(..)`, `and_bool(..)`, `and_then_bool(..)`,
//! `or_bool(..)` and `or_else_bool(..)` functions work similarly to the
//! methods without the `_bool` suffix, but require and return `OptionBool`
//! instead of `Option<bool>`. This allows people to stay within the type.
//!
//! The `OptionBool` type is expected to require only 1 byte of storage:
//!
//! ```
//! assert!(1 == std::mem::size_of::<optional::OptionBool>());
//! ```
//!
//! Then there is the `Optioned<T>` type which wraps a type `T` as an optional
//! value of `T` where one particular value represents None. `Optioned<T>`
//! requires the exact same space as T:
//!
//! ```
//! assert!(std::mem::size_of::<optional::Optioned<i64>>() ==
//! std::mem::size_of::<i64>());
//! assert!(std::mem::size_of::<optional::Optioned<f32>>() ==
//! std::mem::size_of::<f32>());
//! assert!(std::mem::size_of::<optional::Optioned<u8>>() ==
//! std::mem::size_of::<u8>());
//! ```
//!
//! There are implementations for `u8..64,usize` with `std::u..::MAX`
//! representing None, also for `i8..64,isize` with `std::i..::MIN`
//! representing None, and for `f32, f64` with `std::f..::NAN` representing
//! None.
//!
//! Using Optioned for your own types is as simple as implementing `Noned` for
//! your type, provided that your type is already Copy and Sized.
#![deny(missing_docs)]
#[cfg(feature = "serde")]
extern crate serde;
use std::slice::Iter;
use std::cmp::Ordering;
use std::convert::From;
use std::iter::Iterator;
use std::mem;
use std::ops::{Deref, Index, RangeFull};
use std::fmt::{self, Debug, Error};
use std::hash::{Hash, Hasher};
use self::OptionBool::*;
/// The `OptionBool` type, a space-efficient Option<bool> replacement
///
/// `Ord` is implemented by hand (rather than derived) so that it agrees with
/// `PartialOrd`: the documented order is Some(true) > Some(false) > None,
/// whereas a derived `Ord` would use declaration order and invert it.
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub enum OptionBool {
    /// Some(true)
    SomeTrue,
    /// Some(false)
    SomeFalse,
    /// None
    None,
}
// Deref
// we use this for Deref implementation. As they are constant, we obviously
// cannot implement DerefMut.
const OB_SOME_TRUE: Option<bool> = Option::Some(true);
const OB_SOME_FALSE: Option<bool> = Option::Some(false);
const OB_NONE: Option<bool> = Option::None;
const OB_SOME_TRUE_REF: &'static Option<bool> = &OB_SOME_TRUE;
const OB_SOME_FALSE_REF: &'static Option<bool> = &OB_SOME_FALSE;
const OB_NONE_REF: &'static Option<bool> = &OB_NONE;
/// We can deref-coerce to `Option<bool>`
impl Deref for OptionBool {
    type Target = Option<bool>;
    #[inline]
    fn deref(&self) -> &'static Option<bool> {
        match *self {
            OptionBool::SomeTrue => OB_SOME_TRUE_REF,
            OptionBool::SomeFalse => OB_SOME_FALSE_REF,
            OptionBool::None => OB_NONE_REF,
        }
    }
}
impl<'a> PartialEq<OptionBool> for &'a OptionBool {
    #[inline]
    fn eq(&self, other: &OptionBool) -> bool {
        match (*self, other) {
            (&OptionBool::SomeTrue, &OptionBool::SomeTrue)
            | (&OptionBool::SomeFalse, &OptionBool::SomeFalse)
            | (&OptionBool::None, &OptionBool::None) => true,
            _ => false,
        }
    }
}
/// Index for `RangeFull` (to slice)
impl Index<RangeFull> for OptionBool {
    type Output = [bool];
    #[inline]
    fn index<'a>(&'a self, _: RangeFull) -> &'static [bool] {
        match *self {
            OptionBool::SomeTrue => OB_TRUE_SLICE_REF,
            OptionBool::SomeFalse => OB_FALSE_SLICE_REF,
            OptionBool::None => OB_EMPTY_SLICE_REF,
        }
    }
}
/// Some(true) > Some(false) > None — delegates to `Ord` so the two orderings
/// can never disagree.
impl PartialOrd for OptionBool {
    #[inline]
    fn partial_cmp(&self, other: &OptionBool) -> Option<Ordering> {
        Option::Some(self.cmp(other))
    }
}
/// Total order consistent with `PartialOrd`: Some(true) > Some(false) > None.
impl Ord for OptionBool {
    #[inline]
    fn cmp(&self, other: &OptionBool) -> Ordering {
        match (*self, *other) {
            (OptionBool::SomeTrue, OptionBool::SomeTrue)
            | (OptionBool::SomeFalse, OptionBool::SomeFalse)
            | (OptionBool::None, OptionBool::None) => Ordering::Equal,
            (OptionBool::SomeTrue, _)
            | (OptionBool::SomeFalse, OptionBool::None) => Ordering::Greater,
            _ => Ordering::Less,
        }
    }
}
static OB_TRUE_SLICE: [bool; 1] = [true];
static OB_FALSE_SLICE: [bool; 1] = [false];
static OB_EMPTY_SLICE: [bool; 0] = [];
static OB_TRUE_SLICE_REF: &'static [bool] = &OB_TRUE_SLICE;
static OB_FALSE_SLICE_REF: &'static [bool] = &OB_FALSE_SLICE;
static OB_EMPTY_SLICE_REF: &'static [bool] = &OB_EMPTY_SLICE;
#[cfg(feature = "serde")]
impl<'de> serde::Deserialize<'de> for OptionBool {
    /// with `feature = "serde"`, (de)serialization support is active.
    ///
    /// Deserializes exactly like `Option<bool>` and converts the result.
    ///
    /// ```rust
    ///# extern crate serde_json;
    ///# extern crate optional;
    ///# use optional::OptionBool::SomeTrue;
    ///# fn main() {
    /// assert_eq!(SomeTrue, serde_json::from_str("true").unwrap());
    ///# }
    /// ```
    fn deserialize<D>(deserializer: D) -> Result<OptionBool, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        Option::<bool>::deserialize(deserializer).map(OptionBool::from)
    }
}
#[cfg(feature = "serde")]
impl serde::Serialize for OptionBool {
    /// with `feature = "serde"`, (de)serialization support is active.
    ///
    /// Serializes exactly like the corresponding `Option<bool>`.
    ///
    /// ```rust
    ///# extern crate serde_json;
    ///# extern crate optional;
    ///# use optional::OptionBool::SomeTrue;
    ///# fn main() {
    /// assert_eq!("true", serde_json::to_string(&SomeTrue).unwrap());
    ///# }
    /// ```
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        Option::<bool>::from(*self).serialize(serializer)
    }
}
impl OptionBool {
    /// Create a SomeTrue for true, SomeFalse for false
    #[inline]
    pub fn some(b: bool) -> Self {
        if b {
            SomeTrue
        } else {
            SomeFalse
        }
    }
    /// Create a None value.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(OptionBool::none(), optional::OptionBool::None);
    /// ```
    #[inline]
    pub fn none() -> Self {
        None
    }
    /// Returns true if the option is a Some value.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert!(OptionBool::SomeTrue.is_some());
    /// assert!(OptionBool::SomeFalse.is_some());
    /// assert!(!OptionBool::None.is_some());
    /// ```
    #[inline]
    pub fn is_some(&self) -> bool {
        if let OptionBool::None = *self {
            false
        } else {
            true
        }
    }
    /// Returns true if the option is a None value.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert!(!OptionBool::SomeTrue.is_none());
    /// assert!(!OptionBool::SomeFalse.is_none());
    /// assert!(OptionBool::None.is_none());
    /// ```
    #[inline]
    pub fn is_none(&self) -> bool {
        if let OptionBool::None = *self {
            true
        } else {
            false
        }
    }
    /// Unwraps the contained bool, panics on None with given message.
    ///
    /// # Panics
    ///
    /// if self is None
    ///
    /// # Examples
    ///
    /// For SomeTrue/SomeFalse, the corresponding bool is returned.
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert!(OptionBool::SomeTrue.expect("FAIL"));
    /// assert!(!OptionBool::SomeFalse.expect("FAIL"));
    /// ```
    ///
    /// On None, it panics with the given message.
    ///
    /// ```should_panic
    ///# use optional::OptionBool;
    /// OptionBool::None.expect("FAIL"); // panics with FAIL
    /// ```
    #[inline]
    pub fn expect(&self, msg: &str) -> bool {
        match *self {
            SomeTrue => true,
            SomeFalse => false,
            None => panic!("{}", msg),
        }
    }
    /// Unwraps the contained bool, panics on None.
    ///
    /// # Panics
    ///
    /// if self is None
    ///
    /// # Examples
    ///
    /// For SomeTrue/SomeFalse, the corresponding bool is returned.
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert!(OptionBool::SomeTrue.unwrap());
    /// assert!(!OptionBool::SomeFalse.unwrap());
    /// ```
    ///
    /// On None, it panics with "unwrap called on None"
    ///
    /// ```should_panic
    ///# use optional::OptionBool;
    /// OptionBool::None.unwrap(); // panics
    /// ```
    #[inline]
    pub fn unwrap(&self) -> bool {
        self.expect("unwrap called on None")
    }
    /// Returns the contained bool or a default.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert!(OptionBool::SomeTrue.unwrap_or(false));
    /// assert!(!OptionBool::SomeFalse.unwrap_or(true));
    /// assert!(OptionBool::None.unwrap_or(true));
    /// assert!(!OptionBool::None.unwrap_or(false));
    /// ```
    #[inline]
    pub fn unwrap_or(&self, def: bool) -> bool {
        match *self {
            SomeTrue => true,
            SomeFalse => false,
            None => def,
        }
    }
    /// Returns the contained bool or a computed default.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert!(OptionBool::SomeTrue.unwrap_or_else(|| false));
    /// assert!(!OptionBool::SomeFalse.unwrap_or_else(|| panic!()));
    /// assert!(OptionBool::None.unwrap_or_else(|| true));
    /// ```
    #[inline]
    pub fn unwrap_or_else<F>(self, f: F) -> bool
    where
        F: FnOnce() -> bool,
    {
        match self {
            SomeTrue => true,
            SomeFalse => false,
            None => f(),
        }
    }
    /// Maps an `OptionBool` to an `Option<U>` by applying the function
    /// over the contained bool.
    ///
    /// Note that there is also [`map_bool(..)`](#method.map_bool) which works
    /// similarly, but returns another `OptionBool`.
    ///
    /// # Examples
    ///
    /// Convert the contained bool to a Yes/No message
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(Some("Yes"), OptionBool::SomeTrue.map(
    /// |b| if b { "Yes" } else { "No" }));
    /// ```
    #[inline]
    pub fn map<U, F>(self, f: F) -> Option<U>
    where
        F: FnOnce(bool) -> U,
    {
        match self {
            SomeTrue => Option::Some(f(true)),
            SomeFalse => Option::Some(f(false)),
            None => Option::None,
        }
    }
    /// Maps an `OptionBool` to another `OptionBool` by applying the
    /// function over the contained bool.
    ///
    /// Note that there is also [`map(..)`](#method.map) which works
    /// similarly, but returns an `Option<bool>`.
    ///
    /// # Examples
    ///
    /// Invert the contained `bool`
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(OptionBool::SomeTrue,
    /// OptionBool::SomeFalse.map_bool(|b| !b));
    /// ```
    #[inline]
    pub fn map_bool<F>(self, f: F) -> OptionBool
    where
        F: FnOnce(bool) -> bool,
    {
        match self {
            SomeTrue => if f(true) {
                SomeTrue
            } else {
                SomeFalse
            },
            SomeFalse => if f(false) {
                SomeTrue
            } else {
                SomeFalse
            },
            None => None,
        }
    }
    /// Maps a value to a `U` by applying the function or return a
    /// default `U`.
    ///
    /// # Examples
    ///
    /// Map to a string (as per the daily wtf's boolean definition):
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!("True", OptionBool::SomeTrue.map_or("FileNotFound",
    /// |b| if b { "True" } else { "False" }));
    /// ```
    #[inline]
    pub fn map_or<U, F>(self, default: U, f: F) -> U
    where
        F: FnOnce(bool) -> U,
    {
        match self {
            SomeTrue => f(true),
            SomeFalse => f(false),
            None => default,
        }
    }
    /// Maps a value to a `U` by applying the function or return a
    /// computed default.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!("True", OptionBool::SomeTrue.map_or_else(|| "FileNotFound",
    /// |b| if b { "True" } else { "False" }));
    /// ```
    #[inline]
    pub fn map_or_else<U, D, F>(self, default: D, f: F) -> U
    where
        D: FnOnce() -> U,
        F: FnOnce(bool) -> U,
    {
        match self {
            SomeTrue => f(true),
            SomeFalse => f(false),
            None => default(),
        }
    }
    /// Transforms the `OptionBool` into a `Result<bool, E>`, mapping
    /// `Some`X to `Ok(`X`)` and `None` to `Err(err)`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(OptionBool::SomeTrue.ok_or("Ouch"), Ok(true));
    /// assert_eq!(OptionBool::None.ok_or("Ouch"), Err("Ouch"));
    /// ```
    #[inline]
    pub fn ok_or<E>(self, err: E) -> Result<bool, E> {
        match self {
            SomeTrue => Ok(true),
            SomeFalse => Ok(false),
            None => Err(err),
        }
    }
    /// Transforms the `OptionBool` into a `Result<bool, E>`, mapping `Some`X to
    /// `Ok(`X`)` and `None` to a calculated `Err(err)`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    ///# fn something_expensive() -> bool { unimplemented!(); }
    /// assert_eq!(OptionBool::SomeTrue.ok_or_else(|| something_expensive()), Ok(true));
    /// assert_eq!(OptionBool::None.ok_or_else(|| "Ouch"), Err("Ouch"));
    /// ```
    #[inline]
    pub fn ok_or_else<E, F>(self, err: F) -> Result<bool, E>
    where
        F: FnOnce() -> E,
    {
        match self {
            SomeTrue => Ok(true),
            SomeFalse => Ok(false),
            None => Err(err()),
        }
    }
    /// Returns `None` if the option is `None`, otherwise returns `optb`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(Some(1), OptionBool::SomeTrue.and(Some(1)));
    /// assert_eq!(None, OptionBool::None.and(Some(1)));
    /// let actual : Option<u8> = None;
    /// assert_eq!(None, OptionBool::SomeTrue.and(actual));
    /// ```
    #[inline]
    pub fn and<U>(self, optb: Option<U>) -> Option<U> {
        match self {
            SomeTrue | SomeFalse => optb,
            None => Option::None,
        }
    }
    /// Returns `None` if the option is `None`, otherwise returns `optb`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(OptionBool::SomeTrue,
    /// OptionBool::SomeFalse.and_bool(OptionBool::SomeTrue));
    /// assert_eq!(OptionBool::None,
    /// OptionBool::None.and_bool(OptionBool::SomeFalse));
    /// assert_eq!(OptionBool::None,
    /// OptionBool::SomeTrue.and_bool(OptionBool::None));
    /// ```
    #[inline]
    pub fn and_bool(self, optb: OptionBool) -> OptionBool {
        match self {
            None => None,
            _ => optb,
        }
    }
    /// returns `None` if the `OptionBool` is `None`, otherwise calls `f` with
    /// the boolean value and returns the result as an `Option<U>`
    ///
    /// Note that there is also [`and_then_bool(..)`](#method.and_then_bool)
    /// which works similarly, but returns another `OptionBool`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(None, OptionBool::SomeFalse.and_then(
    /// |x| if x { Some(true) } else { None }));
    /// ```
    #[inline]
    pub fn and_then<U, F>(self, f: F) -> Option<U>
    where
        F: FnOnce(bool) -> Option<U>,
    {
        match self {
            SomeTrue => f(true),
            SomeFalse => f(false),
            None => Option::None,
        }
    }
    /// returns `None` if the `OptionBool` is `None`, otherwise calls `f` with
    /// the boolean value and returns the result as an `OptionBool`
    ///
    /// Note that there is also [`and_then(..)`](#method.and_then) which works
    /// similarly, but returns an `Option<bool>`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(OptionBool::None, OptionBool::SomeFalse.and_then_bool(
    /// |x| if x { OptionBool::SomeTrue } else { OptionBool::None }));
    /// ```
    #[inline]
    pub fn and_then_bool<F>(self, f: F) -> OptionBool
    where
        F: FnOnce(bool) -> OptionBool,
    {
        match self {
            SomeTrue => f(true),
            SomeFalse => f(false),
            None => None,
        }
    }
    /// Returns this as Option unless this is `None`, in which case returns
    /// `optb`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(Some(false), OptionBool::SomeFalse.or(Some(true)));
    /// assert_eq!(Some(true), OptionBool::None.or(Some(true)));
    /// assert_eq!(None, OptionBool::None.or(None));
    /// ```
    #[inline]
    pub fn or(self, optb: Option<bool>) -> Option<bool> {
        match self {
            SomeTrue => Some(true),
            SomeFalse => Some(false),
            None => optb,
        }
    }
    /// Returns this as Option unless this is `None`, in which case returns
    /// `optb`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(OptionBool::SomeFalse,
    /// OptionBool::SomeFalse.or_bool(OptionBool::SomeTrue));
    /// assert_eq!(OptionBool::SomeTrue,
    /// OptionBool::None.or_bool(OptionBool::SomeTrue));
    /// assert_eq!(OptionBool::None,
    /// OptionBool::None.or_bool(OptionBool::None));
    /// ```
    #[inline]
    pub fn or_bool(self, optb: OptionBool) -> OptionBool {
        match self {
            None => optb,
            x => x,
        }
    }
    /// Returns this as Option unless this is `None`, in which case use the
    /// supplied function to calculate the result.
    ///
    /// Note that there is also [`or_else_bool(..)`](#method.or_else_bool)
    /// which works similarly, but returns another `OptionBool`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(Some(false), OptionBool::SomeFalse.or_else(|| Some(true)));
    /// assert_eq!(Some(true), OptionBool::None.or_else(|| Some(true)));
    /// assert_eq!(None, OptionBool::None.or_else(|| None));
    /// ```
    #[inline]
    pub fn or_else<F>(self, f: F) -> Option<bool>
    where
        F: FnOnce() -> Option<bool>,
    {
        match self {
            SomeTrue => Option::Some(true),
            SomeFalse => Option::Some(false),
            None => f(),
        }
    }
    /// Returns this as Option unless this is `None`, in which case use the
    /// supplied function to calculate the result.
    ///
    /// Note that there is also [`or_else(..)`](#method.or_else) which works
    /// similarly, but returns an `Option<bool>`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(OptionBool::SomeFalse,
    /// OptionBool::SomeFalse.or_else_bool(|| OptionBool::SomeTrue));
    /// assert_eq!(OptionBool::SomeTrue,
    /// OptionBool::None.or_else_bool(|| OptionBool::SomeTrue));
    /// assert_eq!(OptionBool::None,
    /// OptionBool::None.or_else_bool(|| OptionBool::None));
    /// ```
    #[inline]
    pub fn or_else_bool<F>(self, f: F) -> OptionBool
    where
        F: FnOnce() -> OptionBool,
    {
        match self {
            None => f(),
            x => x,
        }
    }
    /// return an iterator over all contained (that is zero or one) values.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(None, OptionBool::None.iter().next());
    /// assert_eq!(Some(&true), OptionBool::SomeTrue.iter().next());
    /// ```
    #[inline]
    pub fn iter(&self) -> Iter<bool> {
        self.as_slice().iter()
    }
    /// return a possibly empty slice with the contained value, if any.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(&[true], OptionBool::SomeTrue.as_slice());
    /// assert!(OptionBool::None.as_slice().is_empty());
    /// ```
    #[inline]
    pub fn as_slice(self) -> &'static [bool] {
        match self {
            SomeTrue => OB_TRUE_SLICE_REF,
            SomeFalse => OB_FALSE_SLICE_REF,
            None => OB_EMPTY_SLICE_REF,
        }
    }
    /// Takes the value out of the `OptionBool` and returns it as
    /// `Option<bool>`, changing self to `None`.
    ///
    /// Note that there is also [`take_bool(..)`](#method.take_bool) which
    /// works similarly, but returns an `OptionBool`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// let mut x = OptionBool::some(true);
    /// assert_eq!(Some(true), x.take());
    /// assert_eq!(OptionBool::None, x);
    /// ```
    #[inline]
    pub fn take(&mut self) -> Option<bool> {
        self.take_bool().into()
    }
    /// Takes the value out of the `OptionBool`, changing self to `None`.
    ///
    /// Note that there is also [`take(..)`](#method.take) which works
    /// similarly, but returns an `Option<bool>`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// let mut x = OptionBool::some(true);
    /// assert_eq!(OptionBool::some(true), x.take_bool());
    /// assert_eq!(OptionBool::None, x);
    /// ```
    #[inline]
    pub fn take_bool(&mut self) -> OptionBool {
        mem::replace(self, None)
    }
}
/// Renders exactly like the corresponding `Option<bool>`'s `Debug` output.
impl Debug for OptionBool {
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), Error> {
        let text = match *self {
            SomeTrue => "Some(true)",
            SomeFalse => "Some(false)",
            None => "None",
        };
        write!(f, "{}", text)
    }
}
///iterate over an `OptionBool`
pub struct IterBool {
    // the remaining value; `take()` empties it after the first `next()`
    o: OptionBool,
}
impl Iterator for IterBool {
    type Item = bool;
    #[inline]
    fn next(&mut self) -> Option<bool> {
        // take() leaves None behind, so the iterator yields at most one item
        self.o.take()
    }
}
/// `IntoIterator` works as expected
///
/// # Examples
///
/// ```
///# use optional::OptionBool;
/// let mut pass : bool = false;
/// for b in OptionBool::SomeTrue { pass = b; }
/// assert!(pass);
///
/// for b in OptionBool::None { assert!(false); }
/// ```
impl IntoIterator for OptionBool {
    type Item = bool;
    type IntoIter = IterBool;
    #[inline]
    fn into_iter(self) -> IterBool {
        // wrap the value; IterBool yields it (if any) exactly once
        IterBool { o: self }
    }
}
/// `OptionBool` defaults to `None`.
impl Default for OptionBool {
    #[inline]
    fn default() -> OptionBool {
        None
    }
}
impl From<OptionBool> for Option<bool> {
#[inline]
fn from(o: OptionBool) -> Option<bool> {
match o {
SomeTrue => Option::Some(true),
SomeFalse => Option::Some(false),
None => Option::None,
}
}
}
impl<'a> From<&'a OptionBool> for Option<bool> {
#[inline]
fn from(o: &'a OptionBool) -> Option<bool> {
match *o {
SomeTrue => Option::Some(true),
SomeFalse => Option::Some(false),
None => Option::None,
}
}
}
impl From<Option<bool>> for OptionBool {
#[inline]
fn from(o: Option<bool>) -> Self {
match o {
Option::Some(true) => SomeTrue,
Option::Some(false) => SomeFalse,
Option::None => None,
}
}
}
impl<'a> From<&'a Option<bool>> for OptionBool {
#[inline]
fn from(o: &'a Option<bool>) -> Self {
match *o {
Option::Some(true) => SomeTrue,
Option::Some(false) => SomeFalse,
Option::None => None,
}
}
}
/// A trait whose implementation for any type `T` allows the use of
///`Optioned<T>` where `T` is bound by both `Sized` and `Copy`.
pub trait Noned {
    /// Returns `true` if the contained value is the declared `None` for `T`,
    /// `false` otherwise.
    fn is_none(&self) -> bool;
    /// Returns the declared `None` value for `T`.
    fn get_none() -> Self;
}
// Sentinel-by-comparison implementations: unsigned integers use their MAX,
// signed integers their MIN, and char uses NUL.
macro_rules! noned_by_value {
    ($($t:ty => $none:expr),*) => {
        $(impl Noned for $t {
            #[inline]
            fn is_none(&self) -> bool {
                self == &$none
            }
            #[inline]
            fn get_none() -> $t {
                $none
            }
        })*
    };
}
noned_by_value!(u8 => std::u8::MAX, u16 => std::u16::MAX,
                u32 => std::u32::MAX, u64 => std::u64::MAX,
                usize => std::usize::MAX,
                i8 => std::i8::MIN, i16 => std::i16::MIN,
                i32 => std::i32::MIN, i64 => std::i64::MIN,
                isize => std::isize::MIN,
                char => '\0');
// Floats use NAN, which is never equal to itself, so they must be checked
// with `is_nan` instead of equality.
macro_rules! noned_by_nan {
    ($($t:ty => $nan:expr),*) => {
        $(impl Noned for $t {
            #[inline]
            fn is_none(&self) -> bool {
                self.is_nan()
            }
            #[inline]
            fn get_none() -> $t {
                $nan
            }
        })*
    };
}
noned_by_nan!(f32 => std::f32::NAN, f64 => std::f64::NAN);
///Equality within Optioned
pub trait OptEq {
    /// Is the other optioned equal to this one?
    #[inline]
    fn opt_eq(&self, other: &Self) -> bool;
}
// Plain equality suffices for the integer and char sentinels.
macro_rules! opt_eq_plain {
    ($($t:ty),*) => {
        $(impl OptEq for $t {
            fn opt_eq(&self, other: &Self) -> bool {
                self == other
            }
        })*
    };
}
opt_eq_plain!(u8, u16, u32, u64, usize, i8, i16, i32, i64, isize, char);
// Floats: two NAN sentinels compare equal to each other, unlike plain `==`.
macro_rules! opt_eq_float {
    ($($t:ty),*) => {
        $(impl OptEq for $t {
            fn opt_eq(&self, other: &Self) -> bool {
                if self.is_nan() {
                    other.is_nan()
                } else {
                    self == other
                }
            }
        })*
    };
}
opt_eq_float!(f32, f64);
///Ordering within Optioned
pub trait OptOrd {
    /// compare this Optioned with another
    #[inline]
    fn opt_cmp(&self, other: &Self) -> Ordering;
}
// None sorts below every actual value; otherwise defer to `Ord`.
#[inline]
fn _opt_cmp<T: Ord + Copy + Noned>(a: &T, b: &T) -> Ordering {
    if a.is_none() {
        if b.is_none() {
            Ordering::Equal
        } else {
            Ordering::Less
        }
    } else if b.is_none() {
        Ordering::Greater
    } else {
        a.cmp(b)
    }
}
// Same as `_opt_cmp` for types that are only `PartialOrd` (floats); the
// unwrap is fine because the None (NAN) cases are handled beforehand.
#[inline]
fn _opt_cmp_part<T: PartialOrd + Copy + Noned>(a: &T, b: &T) -> Ordering {
    if a.is_none() {
        if b.is_none() {
            Ordering::Equal
        } else {
            Ordering::Less
        }
    } else if b.is_none() {
        Ordering::Greater
    } else {
        a.partial_cmp(b).unwrap()
    }
}
// Unsigned integers and char: the sentinel (MAX) would sort *above* real
// values under plain `cmp`, so the None-aware helper is required.
macro_rules! opt_ord_sentinel_aware {
    ($($t:ty),*) => {
        $(impl OptOrd for $t {
            fn opt_cmp(&self, other: &Self) -> Ordering {
                _opt_cmp(self, other)
            }
        })*
    };
}
opt_ord_sentinel_aware!(u8, u16, u32, u64, usize, char);
// Signed integers: the sentinel is MIN, which plain `cmp` already sorts
// below every other value, so no special casing is needed.
macro_rules! opt_ord_plain {
    ($($t:ty),*) => {
        $(impl OptOrd for $t {
            fn opt_cmp(&self, other: &Self) -> Ordering {
                self.cmp(other)
            }
        })*
    };
}
opt_ord_plain!(i8, i16, i32, i64, isize);
// Floats go through the PartialOrd-based helper (NAN handled up front).
macro_rules! opt_ord_partial {
    ($($t:ty),*) => {
        $(impl OptOrd for $t {
            fn opt_cmp(&self, other: &Self) -> Ordering {
                _opt_cmp_part(self, other)
            }
        })*
    };
}
opt_ord_partial!(f32, f64);
/// An `Option<T>`-like structure that takes only as much space as the enclosed
/// value, at the cost of removing one particular `None` value from the value
/// domain (see `Noned`)
#[derive(Copy, Clone)]
pub struct Optioned<T: Noned + Copy> {
    // invariant: `value == T::get_none()` encodes None; anything else is Some
    value: T,
}
/// Equality works as usual.
///
/// # Examples
///
/// ```
///# use ::optional::{some, none};
/// assert_eq!(some(1u8), some(1u8));
/// assert_eq!(none::<u32>(), none::<u32>());
/// ```
impl<T> PartialEq for Optioned<T>
where
    T: OptEq + Noned + Copy,
{
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        // delegates to OptEq so two None sentinels (e.g. float NANs) compare equal
        self.value.opt_eq(&other.value)
    }
}
impl<T> Eq for Optioned<T>
where
    T: OptEq + Noned + Copy,
{
}
impl<T> PartialOrd for Optioned<T>
where
    T: PartialEq + OptEq + OptOrd + Noned + Copy,
{
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // OptOrd is total over the sentinel-extended domain, so always Some
        Some(self.value.opt_cmp(&other.value))
    }
}
impl<T> Ord for Optioned<T>
where
    T: Eq + OptEq + OptOrd + Noned + Copy,
{
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        // same comparison as partial_cmp, keeping Ord and PartialOrd consistent
        self.value.opt_cmp(&other.value)
    }
}
impl<T> Hash for Optioned<T>
where
    T: Noned + Copy + Hash,
{
    #[inline]
    fn hash<H>(&self, state: &mut H)
    where
        H: Hasher,
    {
        self.value.hash(state)
    }
}
#[cfg(feature = "serde")]
impl<'de, T> serde::Deserialize<'de> for Optioned<T>
where
    T: Noned + Copy + serde::Deserialize<'de>,
{
    /// with `feature = "serde"`, (de)serialization support is active.
    ///
    /// Deserializes exactly like `Option<T>` and converts the result.
    ///
    /// ```rust
    ///# extern crate serde_json;
    ///# extern crate optional;
    ///# use optional::{Optioned, Noned, some};
    ///# fn main() {
    /// assert_eq!(some(1f32), serde_json::from_str("1.0").unwrap());
    ///# }
    /// ```
    fn deserialize<D>(deserializer: D) -> Result<Optioned<T>, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        Option::<T>::deserialize(deserializer).map(Optioned::from)
    }
}
#[cfg(feature = "serde")]
impl<T> serde::Serialize for Optioned<T>
where
    T: Noned + Copy + serde::Serialize,
{
    /// with `feature = "serde"`, (de)serialization support is active.
    ///
    /// Serializes exactly like the corresponding `Option<T>`.
    ///
    /// ```rust
    ///# extern crate serde_json;
    ///# extern crate optional;
    ///# use optional::{Optioned, Noned, some};
    ///# fn main() {
    /// assert_eq!("1.0", serde_json::to_string(&some(1f32)).unwrap());
    ///# }
    /// ```
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        self.as_option().serialize(serializer)
    }
}
mod slice_of_up_to_one {
    /// Get a slice of zero or one elements from a ref to the single value and a bool whether
    /// the value should be included
    ///
    /// note: This is safe because:
    ///
    /// ```rust
    /// assert_eq!(0, false as usize); // empty slice
    /// assert_eq!(1, true as usize); // slice of one element
    /// ```
    #[inline]
    pub fn slice_of<T>(value: &T, one: bool) -> &[T] {
        // SAFETY: `value` is a valid, aligned reference, so it can be viewed
        // as a slice of length 0 or 1; `one as usize` is exactly 0 or 1, and
        // the returned slice's lifetime is tied to `value`'s borrow by the
        // function signature.
        unsafe { ::std::slice::from_raw_parts(value, one as usize) }
    }
}
impl<T: Noned + Copy> Optioned<T> {
/// Create an `Optioned<T>` that is `some(t)`.
///
/// # Panics
///
/// panics if the supplied value is the None value
///
/// # Examples
///
/// ```
///# use ::optional::Optioned;
/// Optioned::<i32>::some(1); // Optioned(1)
/// ```
///
/// ```should_panic
///# use ::optional::Optioned;
/// Optioned::<f64>::some(std::f64::NAN); // panic!s
/// ```
#[inline]
pub fn some(t: T) -> Self {
assert!(!t.is_none());
Optioned::<T> { value: t }
}
/// Create an `Optioned<T>` that is `none()`.
///
/// # Examples
///
/// ```
///# use ::optional::Optioned;
/// Optioned::<u16>::none(); // Optioned(std::u16::MAX)
/// ```
#[inline]
pub fn none() -> Self {
Optioned::<T> {
value: <T as Noned>::get_none(),
}
}
#[inline]
fn as_option(&self) -> Option<T> {
if self.value.is_none() {
Option::None
} else {
Option::Some(self.value)
}
}
/// Returns `true` if this `Optioned` is `None`, `false` otherwise.
#[inline]
pub fn is_none(&self) -> bool {
self.value.is_none()
}
/// Returns `true` if this `Optioned` contains a value, `false` otherwise.
#[inline]
pub fn is_some(&self) -> bool {
!self.value.is_none()
}
/// Unwraps the value, if any, else panics with the given message.
///
/// # Panics
///
/// if self is None
///
/// # Examples
///
/// For Some(_), the corresponding value is returned.
///
/// ```
///# use optional::Optioned;
/// assert_eq!(42u8, Optioned::some(42u8).expect("FAIL"));
/// ```
///
/// On None, it panics with the given message.
///
/// ```should_panic
///# use optional::Optioned;
///Optioned::<u8>::none().expect("FAIL"); // panics with FAIL
/// ```
#[inline]
pub fn expect(&self, msg: &str) -> T {
if self.is_none() {
panic!("{}", msg)
}
self.value
}
/// Unwraps the value, if any, else panics with "unwrap called on None".
///
/// # Panics
///
/// if self is `None`
///
/// # Examples
///
/// For `Some(_)`, the corresponding value is returned.
///
/// ```
///# use optional::Optioned;
/// assert_eq!(42u8, Optioned::some(42u8).unwrap());
/// ```
///
/// On `None`, it panics with the given message.
///
/// ```should_panic
///# use optional::Optioned;
///Optioned::<u8>::none().unwrap(); // panics
/// ```
#[inline]
pub fn unwrap(&self) -> T {
self.expect("unwrap called on None")
}
/// Returns the contained value, even if None.
///
/// # Examples
///
/// ```
///# use optional::{some, none};
/// assert_eq!(-128i8, none().unpack());
/// assert_eq!(1u32, some(1).unpack());
/// ```
#[inline]
pub fn unpack(&self) -> T {
self.value
}
/// Returns the contained value or a default.
///
/// # Examples
///
/// ```
///# use optional::{some, none};
/// assert_eq!(-1i8, some(-1i8).unwrap_or(127i8));
/// assert_eq!(42u16, none().unwrap_or(42u16));
/// ```
#[inline]
pub fn unwrap_or(&self, def: T) -> T {
if self.is_none() {
def
} else {
self.value
}
}
/// Returns the contained value or a calculated default.
///
/// # Examples
///
/// ```
///# use optional::{some, none};
/// assert_eq!(-1i8, some(-1i8).unwrap_or_else(|| panic!()));
/// assert_eq!(42u16, none().unwrap_or_else(|| 42u16));
/// ```
#[inline]
pub fn unwrap_or_else<F>(self, f: F) -> T
where
F: FnOnce() -> T,
{
if self.is_none() {
f()
} else {
self.value
}
}
/// Maps the `Optioned` to an `Option<U>` by applying the function over the
/// contained value, if any.
///
/// # Examples
/// ```
///# use optional::{some, none};
/// assert_eq!(Some(-42), some(42i8).map(|x| -x));
/// assert_eq!(None, none::<i8>().map(|x| -x));
/// ```
#[inline]
pub fn map<U, F>(self, f: F) -> Option<U>
where
F: FnOnce(T) -> U,
{
if self.is_none() {
Option::None
} else {
Option::Some(f(self.value))
}
}
/// Maps the `Optioned<T>` to an `Optioned<U>` by applying the function over
/// the contained value, if any. The result type of the function must be
/// `Noned + Copy`, since any other type is incompatible with `Optioned`.
///
/// # Examples
/// ```
///# use optional::{some, none};
/// assert_eq!(some(-42), some(42i8).map_t(|x| -x));
/// assert_eq!(none::<i8>(), none::<i8>().map_t(|x| -x));
/// ```
#[inline]
pub fn map_t<U, F>(self, f: F) -> Optioned<U>
where
    F: FnOnce(T) -> U,
    U: Noned + Copy,
{
    // Note: `some(..)` panics if `f` returns `U`'s sentinel value.
    if self.is_some() {
        some(f(self.value))
    } else {
        none()
    }
}
/// Maps the contained value to a `U` by applying the function, or returns a
/// default.
///
/// # Examples
/// ```
///# use optional::{some, none};
/// assert_eq!("1", some(1usize).map_or("Unknown".to_string(), |b| b.to_string()));
/// assert_eq!("Unknown", none::<usize>().map_or("Unknown".to_string(), |b| b.to_string()));
/// ```
#[inline]
pub fn map_or<U, F>(self, default: U, f: F) -> U
where
    F: FnOnce(T) -> U,
{
    if self.is_some() {
        f(self.value)
    } else {
        default
    }
}
/// Maps a value to a `U` by applying the function, or returns a lazily
/// computed default.
///
/// # Examples
/// ```
///# use optional::{some, none};
/// assert_eq!("1", some(1usize).map_or_else(|| "Unknown".to_string(),
/// |b| b.to_string()));
/// assert_eq!("Unknown", none::<usize>().map_or_else(
/// || "Unknown".to_string(), |b| b.to_string()));
/// ```
#[inline]
pub fn map_or_else<U, D, F>(self, default: D, f: F) -> U
where
    D: FnOnce() -> U,
    F: FnOnce(T) -> U,
{
    // Exactly one of the two closures runs.
    if self.is_some() {
        f(self.value)
    } else {
        default()
    }
}
/// Returns this option if it contains a value, otherwise returns the other.
///
/// Arguments passed to `or` are eagerly evaluated;
/// if you are passing the result of a function call,
/// it is recommended to use `or_else`, which is lazily evaluated.
///
/// # Examples
///
/// ```
/// # use optional::{Optioned, some, none};
/// let x = some(2);
/// let y = none();
/// assert_eq!(x.or(y), some(2));
///
/// let x = none();
/// let y = some(100);
/// assert_eq!(x.or(y), some(100));
///
/// let x = some(2);
/// let y = some(100);
/// assert_eq!(x.or(y), some(2));
///
/// let x: Optioned<u32> = none();
/// let y = none();
/// assert_eq!(x.or(y), none());
/// ```
#[inline]
pub fn or(self, other: Optioned<T>) -> Optioned<T> {
    if self.is_none() { other } else { self }
}
/// Returns this option if it contains a value, otherwise calls `f` and returns the result.
///
/// # Examples
///
/// ```
/// # use optional::{Optioned, some, none};
/// fn nothing() -> Optioned<u32> { none() }
/// fn something() -> Optioned<u32> { some(1) }
///
/// assert_eq!(some(2).or_else(something), some(2));
/// assert_eq!(none().or_else(something), some(1));
/// assert_eq!(none().or_else(nothing), none());
/// ```
#[inline] // added for consistency: every other method on Optioned is #[inline]
pub fn or_else<F>(self, f: F) -> Optioned<T>
where
    F: FnOnce() -> Optioned<T>,
{
    if self.is_some() {
        self
    } else {
        f()
    }
}
/// Returns `none::<U>()` if this option is `None`, otherwise returns
/// `other` (which may itself be `None`).
///
/// # Examples
///
/// ```
/// # use optional::{Optioned, some, none};
/// let the_other = some::<u32>(42);
///
/// assert_eq!(some('a').and(the_other), some(42));
/// assert_eq!(none::<char>().and(the_other), none::<u32>());
/// assert_eq!(some('a').and(none::<u32>()), none::<u32>());
/// assert_eq!(none::<char>().and(none::<u32>()), none::<u32>());
/// ```
#[inline] // added for consistency: every other method on Optioned is #[inline]
pub fn and<U>(self, other: Optioned<U>) -> Optioned<U>
where
    U: Noned + Copy
{
    if self.is_some() {
        other
    } else {
        none::<U>()
    }
}
/// Returns `none::<U>()` if this `Optioned` is `None`, otherwise calls `f`
/// with the contained value and returns the result as an `Optioned<U>`.
///
/// # Examples
///
/// ```
/// # use optional::{Optioned, some, none, wrap};
/// fn add_two(val: u32) -> Optioned<u32> {
///     wrap(val + 2)
/// }
///
/// fn failed_function(_val: u32) -> Optioned<u32> {
///     none()
/// }
///
/// assert_eq!(some(2).and_then(add_two), some(4));
/// assert_eq!(none().and_then(add_two), none());
/// assert_eq!(some(2).and_then(failed_function), none());
/// assert_eq!(none().and_then(failed_function), none());
/// ```
#[inline] // added for consistency: every other method on Optioned is #[inline]
pub fn and_then<F,U>(self, f: F) -> Optioned<U>
where
    F: FnOnce(T) -> Optioned<U>,
    U: Noned + Copy
{
    if self.is_some() {
        f(self.value)
    } else {
        none()
    }
}
/// Takes the value out of the `Optioned` and returns it as
/// `Option<T>`, changing self to `None`.
///
/// # Examples
///
/// ```
///# use optional::{some, none};
/// let mut x = some(1u8);
/// assert_eq!(Some(1u8), x.take());
/// assert!(x.is_none());
/// ```
#[inline]
pub fn take(&mut self) -> Option<T> {
    // Swap the None sentinel into self, convert what was there to Option<T>.
    mem::replace(self, Self::none()).as_option()
}
/// Return a possibly empty slice over the contained value, if any.
///
/// The slice has length 1 for a `some` and length 0 for a `none`.
///
/// # Examples
/// ```
///# use optional::{some, none};
/// assert_eq!(&[42], some(42u8).as_slice());
/// assert!(none::<i16>().as_slice().is_empty());
/// ```
#[inline]
pub fn as_slice(&self) -> &[T] {
    // NOTE(review): relies on `slice_of_up_to_one::slice_of(ptr, non_empty)`
    // returning a 0- or 1-element slice over `value` — confirm in its module.
    slice_of_up_to_one::slice_of(&self.value, self.is_some())
}
/// Return an iterator over all contained (that is zero or one) values.
///
/// # Examples
///
/// ```
///# use optional::{some, none};
/// assert_eq!(None, none::<u64>().iter().next());
/// assert_eq!(Some(42u64), some(42u64).iter().next());
/// ```
#[inline]
pub fn iter(&self) -> OptionedIter<T> {
    // Optioned is Copy, so the iterator owns its own snapshot.
    let snapshot = *self;
    OptionedIter { o: snapshot }
}
}
/// Create an `Optioned<T>` that is `some(t)`.
///
/// # Panics
///
/// Panics if the supplied value is `T`'s `None` sentinel.
///
/// # Examples
///
/// ```
///# use ::optional::some;
/// some(1i32); // Optioned(1i32)
/// ```
///
/// ```should_panic
///# use ::optional::some;
/// some(std::f64::NAN); // panic!s
/// ```
pub fn some<T: Noned + Copy>(value: T) -> Optioned<T> {
    // `T` is inferred from the argument.
    Optioned::some(value)
}
/// Create a `None Optioned<T>`. Note that the type must be inferrible
/// from the context, or you'd need to call with `::<T>` where `T` is
/// the specific type.
///
/// # Examples
///
/// ```
///# use ::optional::{none, Optioned};
/// let x : Optioned<i16> = none();
/// none::<f32>();
/// ```
pub fn none<T: Noned + Copy>() -> Optioned<T> {
    // `T` is inferred from the return type at the call site.
    Optioned::none()
}
/// Wrap a `T` into an `Optioned<T>`, regardless of its None-ness.
///
/// Unlike [`some`], this never panics: wrapping the sentinel simply yields
/// a `None` value.
///
/// # Examples
///
/// ```
///# use optional::wrap;
/// assert!(wrap(1u8).is_some());
/// assert!(wrap(255u8).is_none());
/// ```
pub fn wrap<T: Noned + Copy>(value: T) -> Optioned<T> {
    Optioned { value }
}
/// Formats like the corresponding `Option<T>`: `Some(..)` or `None`.
impl<T: Noned + Copy + Debug> Debug for Optioned<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), Error> {
        if self.is_some() {
            write!(f, "Some({:?})", &self.value)
        } else {
            f.write_str("None")
        }
    }
}
/// The default `Optioned<T>` is `none()`.
impl<T: Noned + Copy> Default for Optioned<T> {
    #[inline]
    fn default() -> Optioned<T> {
        Self::none()
    }
}
/// Iterate over an `Optioned<T>`, yielding zero or one values.
#[derive(Copy, Clone)]
pub struct OptionedIter<T: Noned + Copy> {
    // The remaining element (if any); `next()` takes it, leaving the sentinel.
    o: Optioned<T>,
}
impl<T: Noned + Copy> Iterator for OptionedIter<T> {
    type Item = T;
    #[inline]
    fn next(&mut self) -> Option<T> {
        // `take` yields the value at most once and leaves `None` behind,
        // so the iterator is fused by construction.
        self.o.take()
    }
}
/// Convert from a borrowed `Option<T>`; panics if the `Some` value is the
/// `None` sentinel (same contract as [`Optioned::some`]).
impl<'a, T: Noned + Copy> From<&'a Option<T>> for Optioned<T> {
    #[inline]
    fn from(o: &Option<T>) -> Optioned<T> {
        match *o {
            Option::Some(value) => Self::some(value),
            Option::None => Self::none(),
        }
    }
}
/// Convert from an owned `Option<T>`; panics if the `Some` value is the
/// `None` sentinel (same contract as [`Optioned::some`]).
impl<T: Noned + Copy> From<Option<T>> for Optioned<T> {
    #[inline]
    fn from(o: Option<T>) -> Optioned<T> {
        match o {
            Option::Some(value) => Self::some(value),
            Option::None => Self::none(),
        }
    }
}
// NOTE(review): implementing `From<Optioned<T>> for Option<T>` instead would
// provide this `Into` via the std blanket impl and is the idiomatic
// direction — confirm no conflicting `From` impl exists before changing.
impl<T: Noned + Copy> Into<Option<T>> for Optioned<T> {
    #[inline]
    fn into(self) -> Option<T> {
        self.as_option()
    }
}
impl<T: Noned + Copy> From<T> for Optioned<T> {
#[inline]
fn from(o: T) -> Optioned<T> {
wrap(o)
}
}
// Added inline attributes.
// Copyright 2015 Andre Bogus
// Licensed under the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>. This file may not be copied, modified,
// or distributed except according to those terms.
//! Space-efficient optional values
//!
//! Type `OptionBool` represents an optional boolean value, similar to
//! `Option<bool>`. Most function implementations are similar or equal.
//! Note that the `map_bool(..)` `and_bool(..)`, `and_then_bool(..)`,
//!`or_bool(..)` and `or_else_bool(..)` functions are working similar to the
//! methods without the `_bool` suffix, but require and return `OptionBool`
//! instead of `Option<bool>`. This allows people to stay within the type.
//!
//! The `OptionBool` type is expected to require only 1 byte of storage:
//!
//! ```
//! assert!(1 == std::mem::size_of::<optional::OptionBool>());
//! ```
//!
//! Then there is the `Optioned<T>` type which wraps a type `T` as an optional
//! value of `T` where one particular value represents None. `Optioned<T>`
//! requires the exact same space as T:
//!
//! ```
//! assert!(std::mem::size_of::<optional::Optioned<i64>>() ==
//! std::mem::size_of::<i64>());
//! assert!(std::mem::size_of::<optional::Optioned<f32>>() ==
//! std::mem::size_of::<f32>());
//! assert!(std::mem::size_of::<optional::Optioned<u8>>() ==
//! std::mem::size_of::<u8>());
//! ```
//!
//! There are implementations for `u8..64,usize` with `std::u..::MAX`
//! representing None, also for `i8..64,isize` with `std::i..::MIN`
//! representing None, and for `f32, f64` with `std::f..::NAN` representing
//! None.
//!
//! Using Optioned for your own types is as simple as implementing `Noned` for
//! your type, provided that your type is already Copy and Sized.
#![deny(missing_docs)]
#[cfg(feature = "serde")]
extern crate serde;
use std::slice::Iter;
use std::cmp::Ordering;
use std::convert::From;
use std::iter::Iterator;
use std::mem;
use std::ops::{Deref, Index, RangeFull};
use std::fmt::{self, Debug, Error};
use std::hash::{Hash, Hasher};
use self::OptionBool::*;
/// The `OptionBool` type, a space-efficient Option<bool> replacement
#[derive(Copy, Clone, PartialEq, Eq, Ord, Hash)]
pub enum OptionBool {
/// Some(true)
SomeTrue,
/// Some(false)
SomeFalse,
/// None
None,
}
// Deref
// We use these constants for the `Deref` implementation: `deref` must hand
// out a reference, so we return `'static` references to these. As they are
// constant, we obviously cannot implement DerefMut.
const OB_SOME_TRUE: Option<bool> = Option::Some(true);
const OB_SOME_FALSE: Option<bool> = Option::Some(false);
const OB_NONE: Option<bool> = Option::None;
// `'static` references to the constants above, one per variant.
const OB_SOME_TRUE_REF: &'static Option<bool> = &OB_SOME_TRUE;
const OB_SOME_FALSE_REF: &'static Option<bool> = &OB_SOME_FALSE;
const OB_NONE_REF: &'static Option<bool> = &OB_NONE;
/// We can deref-coerce to `Option<bool>`
impl Deref for OptionBool {
type Target = Option<bool>;
#[inline]
fn deref(&self) -> &'static Option<bool> {
match *self {
SomeTrue => OB_SOME_TRUE_REF,
SomeFalse => OB_SOME_FALSE_REF,
None => OB_NONE_REF,
}
}
}
impl<'a> PartialEq<OptionBool> for &'a OptionBool {
#[inline]
fn eq(&self, other: &OptionBool) -> bool {
match (*self, other) {
(&SomeTrue, &SomeTrue) | (&SomeFalse, &SomeFalse) | (&None, &None) => true,
_ => false,
}
}
}
/// Index for `RangeFull` (to slice)
impl Index<RangeFull> for OptionBool {
type Output = [bool];
#[inline]
fn index<'a>(&'a self, _: RangeFull) -> &'static [bool] {
match *self {
SomeTrue => OB_TRUE_SLICE_REF,
SomeFalse => OB_FALSE_SLICE_REF,
None => OB_EMPTY_SLICE_REF,
}
}
}
/// Some(true) > Some(false) > None
impl PartialOrd for OptionBool {
#[inline]
fn partial_cmp(&self, other: &OptionBool) -> Option<Ordering> {
match (self, other) {
(&SomeTrue, &SomeTrue) | (&SomeFalse, &SomeFalse) | (&None, &None) => {
Option::Some(Ordering::Equal)
}
(&SomeTrue, &SomeFalse) | (&SomeTrue, &None) | (&SomeFalse, &None) => {
Option::Some(Ordering::Greater)
}
_ => Option::Some(Ordering::Less),
}
}
}
// Backing storage for `as_slice`, `iter` and `Index<RangeFull>`: a slice of
// length 1 per Some variant, and an empty slice for None.
static OB_TRUE_SLICE: [bool; 1] = [true];
static OB_FALSE_SLICE: [bool; 1] = [false];
static OB_EMPTY_SLICE: [bool; 0] = [];
static OB_TRUE_SLICE_REF: &'static [bool] = &OB_TRUE_SLICE;
static OB_FALSE_SLICE_REF: &'static [bool] = &OB_FALSE_SLICE;
static OB_EMPTY_SLICE_REF: &'static [bool] = &OB_EMPTY_SLICE;
#[cfg(feature = "serde")]
impl<'de> serde::Deserialize<'de> for OptionBool {
    /// with `feature = "serde"`, (de)serialization support is active.
    ///
    /// (Example fixed: it previously demonstrated serialization.)
    ///
    /// ```rust
    ///# extern crate serde_json;
    ///# extern crate optional;
    ///# use optional::OptionBool::SomeTrue;
    ///# fn main() {
    /// assert_eq!(SomeTrue, serde_json::from_str("true").unwrap());
    ///# }
    /// ```
    fn deserialize<D>(deserializer: D) -> Result<OptionBool, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        // Deserialize as a plain Option<bool>, then convert.
        Option::<bool>::deserialize(deserializer).map(OptionBool::from)
    }
}
#[cfg(feature = "serde")]
impl serde::Serialize for OptionBool {
    /// with `feature = "serde"`, (de)serialization support is active.
    ///
    /// (Example fixed: it previously demonstrated deserialization.)
    ///
    /// ```rust
    ///# extern crate serde_json;
    ///# extern crate optional;
    ///# use optional::OptionBool::SomeTrue;
    ///# fn main() {
    /// assert_eq!("true", serde_json::to_string(&SomeTrue).unwrap());
    ///# }
    /// ```
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        // Serialize exactly like the equivalent Option<bool>.
        Option::<bool>::from(*self).serialize(serializer)
    }
}
impl OptionBool {
    /// Create a SomeTrue for true, SomeFalse for false
    #[inline]
    pub fn some(b: bool) -> Self {
        if b {
            SomeTrue
        } else {
            SomeFalse
        }
    }
    /// Create a None value.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(OptionBool::none(), optional::OptionBool::None);
    /// ```
    #[inline]
    pub fn none() -> Self {
        None
    }
    /// Returns true if the option is a Some value.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert!(OptionBool::SomeTrue.is_some());
    /// assert!(OptionBool::SomeFalse.is_some());
    /// assert!(!OptionBool::None.is_some());
    /// ```
    #[inline]
    pub fn is_some(&self) -> bool {
        if let OptionBool::None = *self {
            false
        } else {
            true
        }
    }
    /// Returns true if the option is a None value.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert!(!OptionBool::SomeTrue.is_none());
    /// assert!(!OptionBool::SomeFalse.is_none());
    /// assert!(OptionBool::None.is_none());
    /// ```
    #[inline]
    pub fn is_none(&self) -> bool {
        if let OptionBool::None = *self {
            true
        } else {
            false
        }
    }
    /// Unwraps the contained bool, panics on None with given message.
    ///
    /// # Panics
    ///
    /// if self is None
    ///
    /// # Examples
    ///
    /// For SomeTrue/SomeFalse, the corresponding bool is returned.
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert!(OptionBool::SomeTrue.expect("FAIL"));
    /// assert!(!OptionBool::SomeFalse.expect("FAIL"));
    /// ```
    ///
    /// On None, it panics with the given message.
    ///
    /// ```should_panic
    ///# use optional::OptionBool;
    /// OptionBool::None.expect("FAIL"); // panics with FAIL
    /// ```
    #[inline]
    pub fn expect(&self, msg: &str) -> bool {
        match *self {
            SomeTrue => true,
            SomeFalse => false,
            None => panic!("{}", msg),
        }
    }
    /// Unwraps the contained bool, panics on None.
    ///
    /// # Panics
    ///
    /// if self is None
    ///
    /// # Examples
    ///
    /// For SomeTrue/SomeFalse, the corresponding bool is returned.
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert!(OptionBool::SomeTrue.unwrap());
    /// assert!(!OptionBool::SomeFalse.unwrap());
    /// ```
    ///
    /// On None, it panics with "unwrap called on None"
    ///
    /// ```should_panic
    ///# use optional::OptionBool;
    /// OptionBool::None.unwrap(); // panics
    /// ```
    #[inline]
    pub fn unwrap(&self) -> bool {
        self.expect("unwrap called on None")
    }
    /// Returns the contained bool or a default.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert!(OptionBool::SomeTrue.unwrap_or(false));
    /// assert!(!OptionBool::SomeFalse.unwrap_or(true));
    /// assert!(OptionBool::None.unwrap_or(true));
    /// assert!(!OptionBool::None.unwrap_or(false));
    /// ```
    #[inline]
    pub fn unwrap_or(&self, def: bool) -> bool {
        match *self {
            SomeTrue => true,
            SomeFalse => false,
            None => def,
        }
    }
    /// Returns the contained bool or a computed default.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert!(OptionBool::SomeTrue.unwrap_or_else(|| false));
    /// assert!(!OptionBool::SomeFalse.unwrap_or_else(|| panic!()));
    /// assert!(OptionBool::None.unwrap_or_else(|| true));
    /// ```
    #[inline]
    pub fn unwrap_or_else<F>(self, f: F) -> bool
    where
        F: FnOnce() -> bool,
    {
        match self {
            SomeTrue => true,
            SomeFalse => false,
            None => f(),
        }
    }
    /// Maps an `OptionBool` to an `Option<U>` by applying the function
    /// over the contained bool.
    ///
    /// Note that there is also [`map_bool(..)`](#method.map_bool) which works
    /// similarly, but returns another `OptionBool`.
    ///
    /// # Examples
    ///
    /// Convert the contained bool to a Yes/No message
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(Some("Yes"), OptionBool::SomeTrue.map(
    /// |b| if b { "Yes" } else { "No" }));
    /// ```
    #[inline]
    pub fn map<U, F>(self, f: F) -> Option<U>
    where
        F: FnOnce(bool) -> U,
    {
        match self {
            SomeTrue => Option::Some(f(true)),
            SomeFalse => Option::Some(f(false)),
            None => Option::None,
        }
    }
    /// Maps an `OptionBool` to another `OptionBool` by applying the
    /// function over the contained bool.
    ///
    /// Note that there is also [`map(..)`](#method.map) which works
    /// similarly, but returns an `Option<bool>`.
    ///
    /// # Examples
    ///
    /// Invert the contained `bool`
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(OptionBool::SomeTrue,
    /// OptionBool::SomeFalse.map_bool(|b| !b));
    /// ```
    #[inline]
    pub fn map_bool<F>(self, f: F) -> OptionBool
    where
        F: FnOnce(bool) -> bool,
    {
        match self {
            SomeTrue => if f(true) {
                SomeTrue
            } else {
                SomeFalse
            },
            SomeFalse => if f(false) {
                SomeTrue
            } else {
                SomeFalse
            },
            None => None,
        }
    }
    /// Maps a value to a `U` by applying the function or return a
    /// default `U`.
    ///
    /// # Examples
    ///
    /// Map to a string (as per the daily wtf's boolean definition):
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!("True", OptionBool::SomeTrue.map_or("FileNotFound",
    /// |b| if b { "True" } else { "False" }));
    /// ```
    #[inline]
    pub fn map_or<U, F>(self, default: U, f: F) -> U
    where
        F: FnOnce(bool) -> U,
    {
        match self {
            SomeTrue => f(true),
            SomeFalse => f(false),
            None => default,
        }
    }
    /// Maps a value to a `U` by applying the function or return a
    /// computed default.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!("True", OptionBool::SomeTrue.map_or_else(|| "FileNotFound",
    /// |b| if b { "True" } else { "False" }));
    /// ```
    #[inline]
    pub fn map_or_else<U, D, F>(self, default: D, f: F) -> U
    where
        D: FnOnce() -> U,
        F: FnOnce(bool) -> U,
    {
        match self {
            SomeTrue => f(true),
            SomeFalse => f(false),
            None => default(),
        }
    }
    /// Transforms the `OptionBool` into a `Result<bool, E>`, mapping
    /// `Some`X to `Ok(`X`)` and `None` to `Err(err)`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(OptionBool::SomeTrue.ok_or("Ouch"), Ok(true));
    /// assert_eq!(OptionBool::None.ok_or("Ouch"), Err("Ouch"));
    /// ```
    #[inline]
    pub fn ok_or<E>(self, err: E) -> Result<bool, E> {
        match self {
            SomeTrue => Ok(true),
            SomeFalse => Ok(false),
            None => Err(err),
        }
    }
    /// Transforms the `OptionBool` into a `Result<bool, E>`, mapping `Some`X to
    /// `Ok(`X`)` and `None` to a calculated `Err(err)`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    ///# fn something_expensive() -> bool { unimplemented!(); }
    /// assert_eq!(OptionBool::SomeTrue.ok_or_else(|| something_expensive()), Ok(true));
    /// assert_eq!(OptionBool::None.ok_or_else(|| "Ouch"), Err("Ouch"));
    /// ```
    #[inline]
    pub fn ok_or_else<E, F>(self, err: F) -> Result<bool, E>
    where
        F: FnOnce() -> E,
    {
        match self {
            SomeTrue => Ok(true),
            SomeFalse => Ok(false),
            None => Err(err()),
        }
    }
    /// Returns `None` if the option is `None`, otherwise returns `optb`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(Some(1), OptionBool::SomeTrue.and(Some(1)));
    /// assert_eq!(None, OptionBool::None.and(Some(1)));
    /// let actual : Option<u8> = None;
    /// assert_eq!(None, OptionBool::SomeTrue.and(actual));
    /// ```
    #[inline]
    pub fn and<U>(self, optb: Option<U>) -> Option<U> {
        match self {
            SomeTrue | SomeFalse => optb,
            None => Option::None,
        }
    }
    /// Returns `None` if the option is `None`, otherwise returns `optb`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(OptionBool::SomeTrue,
    /// OptionBool::SomeFalse.and_bool(OptionBool::SomeTrue));
    /// assert_eq!(OptionBool::None,
    /// OptionBool::None.and_bool(OptionBool::SomeFalse));
    /// assert_eq!(OptionBool::None,
    /// OptionBool::SomeTrue.and_bool(OptionBool::None));
    /// ```
    #[inline]
    pub fn and_bool(self, optb: OptionBool) -> OptionBool {
        match self {
            None => None,
            _ => optb,
        }
    }
    /// returns `None` if the `OptionBool` is `None`, otherwise calls `f` with
    /// the boolean value and returns the result as an `Option<U>`
    ///
    /// Note that there is also [`and_then_bool(..)`](#method.and_then_bool)
    /// which works similarly, but returns another `OptionBool`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(None, OptionBool::SomeFalse.and_then(
    /// |x| if x { Some(true) } else { None }));
    /// ```
    #[inline]
    pub fn and_then<U, F>(self, f: F) -> Option<U>
    where
        F: FnOnce(bool) -> Option<U>,
    {
        match self {
            SomeTrue => f(true),
            SomeFalse => f(false),
            None => Option::None,
        }
    }
    /// returns `None` if the `OptionBool` is `None`, otherwise calls `f` with
    /// the boolean value and returns the result as an `OptionBool`
    ///
    /// Note that there is also [`and_then(..)`](#method.and_then) which works
    /// similarly, but returns an `Option<bool>`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(OptionBool::None, OptionBool::SomeFalse.and_then_bool(
    /// |x| if x { OptionBool::SomeTrue } else { OptionBool::None }));
    /// ```
    #[inline]
    pub fn and_then_bool<F>(self, f: F) -> OptionBool
    where
        F: FnOnce(bool) -> OptionBool,
    {
        match self {
            SomeTrue => f(true),
            SomeFalse => f(false),
            None => None,
        }
    }
    /// Returns this as Option unless this is `None`, in which case returns
    /// `optb`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(Some(false), OptionBool::SomeFalse.or(Some(true)));
    /// assert_eq!(Some(true), OptionBool::None.or(Some(true)));
    /// assert_eq!(None, OptionBool::None.or(None));
    /// ```
    #[inline]
    pub fn or(self, optb: Option<bool>) -> Option<bool> {
        match self {
            SomeTrue => Some(true),
            SomeFalse => Some(false),
            None => optb,
        }
    }
    /// Returns this as Option unless this is `None`, in which case returns
    /// `optb`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(OptionBool::SomeFalse,
    /// OptionBool::SomeFalse.or_bool(OptionBool::SomeTrue));
    /// assert_eq!(OptionBool::SomeTrue,
    /// OptionBool::None.or_bool(OptionBool::SomeTrue));
    /// assert_eq!(OptionBool::None,
    /// OptionBool::None.or_bool(OptionBool::None));
    /// ```
    #[inline]
    pub fn or_bool(self, optb: OptionBool) -> OptionBool {
        match self {
            None => optb,
            x => x,
        }
    }
    /// Returns this as Option unless this is `None`, in which case use the
    /// supplied function to calculate the result.
    ///
    /// Note that there is also [`or_else_bool(..)`](#method.or_else_bool)
    /// which works similarly, but returns another `OptionBool`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(Some(false), OptionBool::SomeFalse.or_else(|| Some(true)));
    /// assert_eq!(Some(true), OptionBool::None.or_else(|| Some(true)));
    /// assert_eq!(None, OptionBool::None.or_else(|| None));
    /// ```
    #[inline]
    pub fn or_else<F>(self, f: F) -> Option<bool>
    where
        F: FnOnce() -> Option<bool>,
    {
        match self {
            SomeTrue => Option::Some(true),
            SomeFalse => Option::Some(false),
            None => f(),
        }
    }
    /// Returns this as Option unless this is `None`, in which case use the
    /// supplied function to calculate the result.
    ///
    /// Note that there is also [`or_else(..)`](#method.or_else) which works
    /// similarly, but returns an `Option<bool>`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(OptionBool::SomeFalse,
    /// OptionBool::SomeFalse.or_else_bool(|| OptionBool::SomeTrue));
    /// assert_eq!(OptionBool::SomeTrue,
    /// OptionBool::None.or_else_bool(|| OptionBool::SomeTrue));
    /// assert_eq!(OptionBool::None,
    /// OptionBool::None.or_else_bool(|| OptionBool::None));
    /// ```
    #[inline]
    pub fn or_else_bool<F>(self, f: F) -> OptionBool
    where
        F: FnOnce() -> OptionBool,
    {
        match self {
            None => f(),
            x => x,
        }
    }
    /// return an iterator over all contained (that is zero or one) values.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(None, OptionBool::None.iter().next());
    /// assert_eq!(Some(&true), OptionBool::SomeTrue.iter().next());
    /// ```
    #[inline]
    pub fn iter(&self) -> Iter<bool> {
        // Borrow the corresponding static slice and iterate over it.
        self.as_slice().iter()
    }
    /// return a possibly empty slice with the contained value, if any.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// assert_eq!(&[true], OptionBool::SomeTrue.as_slice());
    /// assert!(OptionBool::None.as_slice().is_empty());
    /// ```
    #[inline]
    pub fn as_slice(self) -> &'static [bool] {
        match self {
            SomeTrue => OB_TRUE_SLICE_REF,
            SomeFalse => OB_FALSE_SLICE_REF,
            None => OB_EMPTY_SLICE_REF,
        }
    }
    /// Takes the value out of the `OptionBool` and returns it as
    /// `Option<bool>`, changing self to `None`.
    ///
    /// Note that there is also [`take_bool(..)`](#method.take_bool) which
    /// works similarly, but returns an `OptionBool`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// let mut x = OptionBool::some(true);
    /// assert_eq!(Some(true), x.take());
    /// assert_eq!(OptionBool::None, x);
    /// ```
    #[inline]
    pub fn take(&mut self) -> Option<bool> {
        self.take_bool().into()
    }
    /// Takes the value out of the `OptionBool`, changing self to `None`.
    ///
    /// Note that there is also [`take(..)`](#method.take) which works
    /// similarly, but returns an `Option<bool>`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::OptionBool;
    /// let mut x = OptionBool::some(true);
    /// assert_eq!(OptionBool::some(true), x.take_bool());
    /// assert_eq!(OptionBool::None, x);
    /// ```
    #[inline]
    pub fn take_bool(&mut self) -> OptionBool {
        // Swap None in, hand the previous value out.
        mem::replace(self, None)
    }
}
impl Debug for OptionBool {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), Error> {
write!(
f,
"{}",
match *self {
SomeTrue => "Some(true)",
SomeFalse => "Some(false)",
None => "None",
}
)
}
}
/// Iterate over an `OptionBool`, yielding zero or one `bool`s.
pub struct IterBool {
    // The remaining element (if any); `next()` takes it, leaving None.
    o: OptionBool,
}
impl Iterator for IterBool {
    type Item = bool;
    #[inline]
    fn next(&mut self) -> Option<bool> {
        // `take` yields the value at most once and leaves None behind,
        // so the iterator is fused by construction.
        self.o.take()
    }
}
/// `IntoIterator` works as expected
///
/// # Examples
///
/// ```
///# use optional::OptionBool;
/// let mut pass : bool = false;
/// for b in OptionBool::SomeTrue { pass = b; }
/// assert!(pass);
///
/// for _b in OptionBool::None { assert!(false); }
/// ```
impl IntoIterator for OptionBool {
    type Item = bool;
    type IntoIter = IterBool;
    #[inline]
    fn into_iter(self) -> IterBool {
        IterBool { o: self }
    }
}
/// `OptionBool` defaults to `None`.
impl Default for OptionBool {
#[inline]
fn default() -> OptionBool {
None
}
}
/// Lossless conversion to the equivalent `Option<bool>`.
impl From<OptionBool> for Option<bool> {
    #[inline]
    fn from(o: OptionBool) -> Option<bool> {
        // Deref yields a &'static Option<bool> for the variant; copy it out.
        *o
    }
}
/// Lossless conversion from a borrowed `OptionBool`.
impl<'a> From<&'a OptionBool> for Option<bool> {
    #[inline]
    fn from(o: &'a OptionBool) -> Option<bool> {
        // One deref to OptionBool, a second through its Deref impl.
        **o
    }
}
impl From<Option<bool>> for OptionBool {
#[inline]
fn from(o: Option<bool>) -> Self {
match o {
Option::Some(true) => SomeTrue,
Option::Some(false) => SomeFalse,
Option::None => None,
}
}
}
impl<'a> From<&'a Option<bool>> for OptionBool {
#[inline]
fn from(o: &'a Option<bool>) -> Self {
match *o {
Option::Some(true) => SomeTrue,
Option::Some(false) => SomeFalse,
Option::None => None,
}
}
}
/// A trait whose implementation for any type `T` allows the use of
/// `Optioned<T>` where `T` is bound by both `Sized` and `Copy`.
///
/// Implementors designate one particular value of `T` (e.g. `u8::MAX`,
/// `i8::MIN`, `f32::NAN`) as the `None` sentinel, removing it from the
/// usable value domain.
pub trait Noned {
    /// Returns `true` if the contained value is the declared `None` for `T`,
    /// `false` otherwise.
    fn is_none(&self) -> bool;
    /// Returns the declared `None` value for `T`.
    fn get_none() -> Self;
}
/// `std::u8::MAX` doubles as the `None` sentinel.
impl Noned for u8 {
    #[inline]
    fn is_none(&self) -> bool {
        *self == std::u8::MAX
    }
    #[inline]
    fn get_none() -> u8 {
        std::u8::MAX
    }
}
/// `std::u16::MAX` doubles as the `None` sentinel.
impl Noned for u16 {
    #[inline]
    fn is_none(&self) -> bool {
        *self == std::u16::MAX
    }
    #[inline]
    fn get_none() -> u16 {
        std::u16::MAX
    }
}
/// `std::u32::MAX` doubles as the `None` sentinel.
impl Noned for u32 {
    #[inline]
    fn is_none(&self) -> bool {
        *self == std::u32::MAX
    }
    #[inline]
    fn get_none() -> u32 {
        std::u32::MAX
    }
}
/// `std::u64::MAX` doubles as the `None` sentinel.
impl Noned for u64 {
    #[inline]
    fn is_none(&self) -> bool {
        *self == std::u64::MAX
    }
    #[inline]
    fn get_none() -> u64 {
        std::u64::MAX
    }
}
/// `std::usize::MAX` doubles as the `None` sentinel.
impl Noned for usize {
    #[inline]
    fn is_none(&self) -> bool {
        *self == std::usize::MAX
    }
    #[inline]
    fn get_none() -> usize {
        std::usize::MAX
    }
}
/// `std::i8::MIN` doubles as the `None` sentinel.
impl Noned for i8 {
    #[inline]
    fn is_none(&self) -> bool {
        *self == std::i8::MIN
    }
    #[inline]
    fn get_none() -> i8 {
        std::i8::MIN
    }
}
/// `std::i16::MIN` doubles as the `None` sentinel.
impl Noned for i16 {
    #[inline]
    fn is_none(&self) -> bool {
        *self == std::i16::MIN
    }
    #[inline]
    fn get_none() -> i16 {
        std::i16::MIN
    }
}
/// `std::i32::MIN` doubles as the `None` sentinel.
impl Noned for i32 {
    #[inline]
    fn is_none(&self) -> bool {
        *self == std::i32::MIN
    }
    #[inline]
    fn get_none() -> i32 {
        std::i32::MIN
    }
}
/// `std::i64::MIN` doubles as the `None` sentinel.
impl Noned for i64 {
    #[inline]
    fn is_none(&self) -> bool {
        *self == std::i64::MIN
    }
    #[inline]
    fn get_none() -> i64 {
        std::i64::MIN
    }
}
/// `std::isize::MIN` doubles as the `None` sentinel.
impl Noned for isize {
    #[inline]
    fn is_none(&self) -> bool {
        *self == std::isize::MIN
    }
    #[inline]
    fn get_none() -> isize {
        std::isize::MIN
    }
}
/// `NAN` doubles as the `None` sentinel (any NaN counts as `None`).
impl Noned for f32 {
    #[inline]
    fn is_none(&self) -> bool {
        // NaN is the only f32 value that is not equal to itself.
        *self != *self
    }
    #[inline]
    fn get_none() -> f32 {
        std::f32::NAN
    }
}
/// `NAN` doubles as the `None` sentinel (any NaN counts as `None`).
impl Noned for f64 {
    #[inline]
    fn is_none(&self) -> bool {
        // NaN is the only f64 value that is not equal to itself.
        *self != *self
    }
    #[inline]
    fn get_none() -> f64 {
        std::f64::NAN
    }
}
/// The NUL character doubles as the `None` sentinel.
impl Noned for char {
    #[inline]
    fn is_none(&self) -> bool {
        '\0' == *self
    }
    #[inline]
    fn get_none() -> char {
        '\0'
    }
}
/// Equality within Optioned
pub trait OptEq {
    /// Is the other optioned equal to this one?
    // NOTE(review): `#[inline]` on a trait method *declaration* (no default
    // body) has no effect — confirm and consider dropping it.
    #[inline]
    fn opt_eq(&self, other: &Self) -> bool;
}
/// Plain value equality; the sentinel only equals itself.
impl OptEq for u8 {
    fn opt_eq(&self, other: &Self) -> bool {
        *self == *other
    }
}
/// Plain value equality; the sentinel only equals itself.
impl OptEq for u16 {
    fn opt_eq(&self, other: &Self) -> bool {
        *self == *other
    }
}
/// Plain value equality; the sentinel only equals itself.
impl OptEq for u32 {
    fn opt_eq(&self, other: &Self) -> bool {
        *self == *other
    }
}
/// Plain value equality; the sentinel only equals itself.
impl OptEq for u64 {
    fn opt_eq(&self, other: &Self) -> bool {
        *self == *other
    }
}
/// Plain value equality; the sentinel only equals itself.
impl OptEq for usize {
    fn opt_eq(&self, other: &Self) -> bool {
        *self == *other
    }
}
/// Plain value equality; the sentinel only equals itself.
impl OptEq for i8 {
    fn opt_eq(&self, other: &Self) -> bool {
        *self == *other
    }
}
/// Plain value equality; the sentinel only equals itself.
impl OptEq for i16 {
    fn opt_eq(&self, other: &Self) -> bool {
        *self == *other
    }
}
/// Plain value equality; the sentinel only equals itself.
impl OptEq for i32 {
    fn opt_eq(&self, other: &Self) -> bool {
        *self == *other
    }
}
/// Plain value equality; the sentinel only equals itself.
impl OptEq for i64 {
    fn opt_eq(&self, other: &Self) -> bool {
        *self == *other
    }
}
/// Plain value equality; the sentinel only equals itself.
impl OptEq for isize {
    fn opt_eq(&self, other: &Self) -> bool {
        *self == *other
    }
}
/// Value equality, except all NaNs (the `None` sentinel) compare equal
/// to each other.
impl OptEq for f32 {
    fn opt_eq(&self, other: &Self) -> bool {
        if self.is_nan() {
            return other.is_nan();
        }
        self == other
    }
}
/// Value equality, except all NaNs (the `None` sentinel) compare equal
/// to each other.
impl OptEq for f64 {
    fn opt_eq(&self, other: &Self) -> bool {
        if self.is_nan() {
            return other.is_nan();
        }
        self == other
    }
}
/// Plain value equality; the sentinel only equals itself.
impl OptEq for char {
    fn opt_eq(&self, other: &Self) -> bool {
        *self == *other
    }
}
/// Ordering within Optioned
pub trait OptOrd {
    /// compare this Optioned with another
    // NOTE(review): `#[inline]` on a trait method *declaration* (no default
    // body) has no effect — confirm and consider dropping it.
    #[inline]
    fn opt_cmp(&self, other: &Self) -> Ordering;
}
/// Total order where the `None` sentinel sorts before every other value;
/// two `None`s compare equal.
#[inline]
fn _opt_cmp<T: Ord + Copy + Noned>(a: &T, b: &T) -> Ordering {
    match (a.is_none(), b.is_none()) {
        (true, true) => Ordering::Equal,
        (true, false) => Ordering::Less,
        (false, true) => Ordering::Greater,
        (false, false) => a.cmp(b),
    }
}
/// Like `_opt_cmp`, but for `PartialOrd` types: the `None` sentinel sorts
/// first, and two non-`None` values are compared via `partial_cmp`.
#[inline]
fn _opt_cmp_part<T: PartialOrd + Copy + Noned>(a: &T, b: &T) -> Ordering {
    match (a.is_none(), b.is_none()) {
        (true, true) => Ordering::Equal,
        (true, false) => Ordering::Less,
        (false, true) => Ordering::Greater,
        // For floats the sentinel is NaN, so two non-None values are
        // always comparable and the unwrap cannot fire.
        (false, false) => a.partial_cmp(b).unwrap(),
    }
}
impl OptOrd for u8 {
    // `None` is u8::MAX (the largest value), so plain `cmp` would sort
    // `None` last; `_opt_cmp` special-cases it to sort first.
    fn opt_cmp(&self, other: &Self) -> Ordering {
        _opt_cmp(self, other)
    }
}
impl OptOrd for u16 {
    // `None` is u16::MAX; `_opt_cmp` makes it sort first instead of last.
    fn opt_cmp(&self, other: &Self) -> Ordering {
        _opt_cmp(self, other)
    }
}
impl OptOrd for u32 {
    // `None` is u32::MAX; `_opt_cmp` makes it sort first instead of last.
    fn opt_cmp(&self, other: &Self) -> Ordering {
        _opt_cmp(self, other)
    }
}
impl OptOrd for u64 {
    // `None` is u64::MAX; `_opt_cmp` makes it sort first instead of last.
    fn opt_cmp(&self, other: &Self) -> Ordering {
        _opt_cmp(self, other)
    }
}
impl OptOrd for usize {
    // `None` is usize::MAX; `_opt_cmp` makes it sort first instead of last.
    fn opt_cmp(&self, other: &Self) -> Ordering {
        _opt_cmp(self, other)
    }
}
impl OptOrd for i8 {
    // Plain `cmp` suffices: the `None` sentinel is i8::MIN, already the
    // smallest value, so `None` sorts first — same result as `_opt_cmp`
    // without the extra branches.
    fn opt_cmp(&self, other: &Self) -> Ordering {
        self.cmp(other)
    }
}
impl OptOrd for i16 {
    // Plain `cmp` suffices: i16::MIN (the sentinel) is already smallest.
    fn opt_cmp(&self, other: &Self) -> Ordering {
        self.cmp(other)
    }
}
impl OptOrd for i32 {
    // Plain `cmp` suffices: i32::MIN (the sentinel) is already smallest.
    fn opt_cmp(&self, other: &Self) -> Ordering {
        self.cmp(other)
    }
}
impl OptOrd for i64 {
    // Plain `cmp` suffices: i64::MIN (the sentinel) is already smallest.
    fn opt_cmp(&self, other: &Self) -> Ordering {
        self.cmp(other)
    }
}
impl OptOrd for isize {
    // Plain `cmp` suffices: isize::MIN (the sentinel) is already smallest.
    fn opt_cmp(&self, other: &Self) -> Ordering {
        self.cmp(other)
    }
}
impl OptOrd for f32 {
    // Floats are only PartialOrd; `_opt_cmp_part` handles the NaN (`None`)
    // sentinel explicitly so the inner `partial_cmp` never sees a NaN.
    fn opt_cmp(&self, other: &Self) -> Ordering {
        _opt_cmp_part(self, other)
    }
}
impl OptOrd for f64 {
fn opt_cmp(&self, other: &Self) -> Ordering {
_opt_cmp_part(self, other)
}
}
impl OptOrd for char {
fn opt_cmp(&self, other: &Self) -> Ordering {
_opt_cmp(self, other)
}
}
/// An `Option<T>`-like structure that takes only as much space as the enclosed
/// value, at the cost of removing one particular `None` value from the value
/// domain (see `Noned`)
#[derive(Copy, Clone)]
pub struct Optioned<T: Noned + Copy> {
    // Holds either a real value or the type's designated `None` sentinel
    // (see `Noned::get_none`).
    value: T,
}
/// Equality works as usual.
///
/// # Examples
///
/// ```
///# use ::optional::{some, none};
/// assert_eq!(some(1u8), some(1u8));
/// assert_eq!(none::<u32>(), none::<u32>());
/// ```
impl<T> PartialEq for Optioned<T>
where
    T: OptEq + Noned + Copy,
{
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.value.opt_eq(&other.value)
    }
}
// `Eq` is sound because `OptEq` treats the float NaN sentinel as equal to
// itself (see the f32/f64 `OptEq` impls), making equality reflexive.
impl<T> Eq for Optioned<T>
where
    T: OptEq + Noned + Copy,
{
}
impl<T> PartialOrd for Optioned<T>
where
    T: PartialEq + OptEq + OptOrd + Noned + Copy,
{
    // `OptOrd` always yields a total order, so this never returns `None`.
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.value.opt_cmp(&other.value))
    }
}
impl<T> Ord for Optioned<T>
where
    T: Eq + OptEq + OptOrd + Noned + Copy,
{
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        self.value.opt_cmp(&other.value)
    }
}
impl<T> Hash for Optioned<T>
where
    T: Noned + Copy + Hash,
{
    // Hashes the raw stored value, so `none()` hashes as the sentinel value.
    #[inline]
    fn hash<H>(&self, state: &mut H)
    where
        H: Hasher,
    {
        self.value.hash(state)
    }
}
#[cfg(feature = "serde")]
impl<'de, T> serde::Deserialize<'de> for Optioned<T>
where
    T: Noned + Copy + serde::Deserialize<'de>,
{
    /// with `feature = "serde"`, (de)serialization support is active.
    ///
    /// ```rust
    ///# extern crate serde_json;
    ///# extern crate optional;
    ///# use optional::{Optioned, Noned, some};
    ///# fn main() {
    /// assert_eq!(some(1f32), serde_json::from_str("1.0").unwrap());
    ///# }
    /// ```
    fn deserialize<D>(deserializer: D) -> Result<Optioned<T>, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        Option::<T>::deserialize(deserializer).map(Optioned::from)
    }
}
#[cfg(feature = "serde")]
impl<T> serde::Serialize for Optioned<T>
where
    T: Noned + Copy + serde::Serialize,
{
    /// with `feature = "serde"`, (de)serialization support is active.
    ///
    /// ```rust
    ///# extern crate serde_json;
    ///# extern crate optional;
    ///# use optional::{Optioned, Noned, some};
    ///# fn main() {
    /// assert_eq!("1.0", serde_json::to_string(&some(1f32)).unwrap());
    ///# }
    /// ```
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        self.as_option().serialize(serializer)
    }
}
mod slice_of_up_to_one {
    /// Get a slice of zero or one elements from a ref to the single value and a bool whether
    /// the value should be included
    ///
    /// note: This is safe because:
    ///
    /// ```rust
    /// assert_eq!(0, false as usize); // empty slice
    /// assert_eq!(1, true as usize); // slice of one element
    /// ```
    #[inline]
    pub fn slice_of<T>(value: &T, one: bool) -> &[T] {
        // SAFETY: `value` is a valid, aligned reference and the length is
        // either 0 (nothing is read) or 1 (exactly the referenced element),
        // so the slice never extends past the borrowed data.
        unsafe { ::std::slice::from_raw_parts(value, one as usize) }
    }
}
impl<T: Noned + Copy> Optioned<T> {
    /// Create an `Optioned<T>` that is `some(t)`.
    ///
    /// # Panics
    ///
    /// panics if the supplied value is the None value
    ///
    /// # Examples
    ///
    /// ```
    ///# use ::optional::Optioned;
    /// Optioned::<i32>::some(1); // Optioned(1)
    /// ```
    ///
    /// ```should_panic
    ///# use ::optional::Optioned;
    /// Optioned::<f64>::some(std::f64::NAN); // panic!s
    /// ```
    #[inline]
    pub fn some(t: T) -> Self {
        assert!(!t.is_none());
        Optioned::<T> { value: t }
    }
    /// Create an `Optioned<T>` that is `none()`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use ::optional::Optioned;
    /// Optioned::<u16>::none(); // Optioned(std::u16::MAX)
    /// ```
    #[inline]
    pub fn none() -> Self {
        Optioned::<T> {
            value: <T as Noned>::get_none(),
        }
    }
    /// Convert to a standard `Option<T>`, mapping the sentinel to `None`.
    #[inline]
    fn as_option(&self) -> Option<T> {
        if self.value.is_none() {
            Option::None
        } else {
            Option::Some(self.value)
        }
    }
    /// Returns `true` if this `Optioned` is `None`, `false` otherwise.
    #[inline]
    pub fn is_none(&self) -> bool {
        self.value.is_none()
    }
    /// Returns `true` if this `Optioned` contains a value, `false` otherwise.
    #[inline]
    pub fn is_some(&self) -> bool {
        !self.value.is_none()
    }
    /// Unwraps the value, if any, else panics with the given message.
    ///
    /// # Panics
    ///
    /// if self is None
    ///
    /// # Examples
    ///
    /// For Some(_), the corresponding value is returned.
    ///
    /// ```
    ///# use optional::Optioned;
    /// assert_eq!(42u8, Optioned::some(42u8).expect("FAIL"));
    /// ```
    ///
    /// On None, it panics with the given message.
    ///
    /// ```should_panic
    ///# use optional::Optioned;
    ///Optioned::<u8>::none().expect("FAIL"); // panics with FAIL
    /// ```
    #[inline]
    pub fn expect(&self, msg: &str) -> T {
        if self.is_none() {
            panic!("{}", msg)
        }
        self.value
    }
    /// Unwraps the value, if any, else panics with "unwrap called on None".
    ///
    /// # Panics
    ///
    /// if self is `None`
    ///
    /// # Examples
    ///
    /// For `Some(_)`, the corresponding value is returned.
    ///
    /// ```
    ///# use optional::Optioned;
    /// assert_eq!(42u8, Optioned::some(42u8).unwrap());
    /// ```
    ///
    /// On `None`, it panics with the given message.
    ///
    /// ```should_panic
    ///# use optional::Optioned;
    ///Optioned::<u8>::none().unwrap(); // panics
    /// ```
    #[inline]
    pub fn unwrap(&self) -> T {
        self.expect("unwrap called on None")
    }
    /// Returns the contained value, even if None.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::{some, none};
    /// assert_eq!(-128i8, none().unpack());
    /// assert_eq!(1u32, some(1).unpack());
    /// ```
    #[inline]
    pub fn unpack(&self) -> T {
        self.value
    }
    /// Returns the contained value or a default.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::{some, none};
    /// assert_eq!(-1i8, some(-1i8).unwrap_or(127i8));
    /// assert_eq!(42u16, none().unwrap_or(42u16));
    /// ```
    #[inline]
    pub fn unwrap_or(&self, def: T) -> T {
        if self.is_none() {
            def
        } else {
            self.value
        }
    }
    /// Returns the contained value or a calculated default.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::{some, none};
    /// assert_eq!(-1i8, some(-1i8).unwrap_or_else(|| panic!()));
    /// assert_eq!(42u16, none().unwrap_or_else(|| 42u16));
    /// ```
    #[inline]
    pub fn unwrap_or_else<F>(self, f: F) -> T
    where
        F: FnOnce() -> T,
    {
        if self.is_none() {
            f()
        } else {
            self.value
        }
    }
    /// Maps the `Optioned` to an `Option<U>` by applying the function over the
    /// contained value, if any.
    ///
    /// # Examples
    /// ```
    ///# use optional::{some, none};
    /// assert_eq!(Some(-42), some(42i8).map(|x| -x));
    /// assert_eq!(None, none::<i8>().map(|x| -x));
    /// ```
    #[inline]
    pub fn map<U, F>(self, f: F) -> Option<U>
    where
        F: FnOnce(T) -> U,
    {
        if self.is_none() {
            Option::None
        } else {
            Option::Some(f(self.value))
        }
    }
    /// Maps the `Optioned<T>` to an `Optioned<U>` by applying the function over
    /// the contained value, if any. Requires that the result type of the
    /// function be `Noned + Copy`, as other types aren't compatible with
    /// Optioned.
    ///
    /// # Examples
    /// ```
    ///# use optional::{some, none};
    /// assert_eq!(some(-42), some(42i8).map_t(|x| -x));
    /// assert_eq!(none::<i8>(), none::<i8>().map_t(|x| -x));
    /// ```
    #[inline]
    pub fn map_t<U, F>(self, f: F) -> Optioned<U>
    where
        F: FnOnce(T) -> U,
        U: Noned + Copy,
    {
        if self.is_none() {
            none()
        } else {
            some(f(self.value))
        }
    }
    /// Maps the contained value to a `U` by applying the function or return a
    /// default.
    ///
    /// # Examples
    /// ```
    ///# use optional::{some, none};
    /// assert_eq!("1", some(1usize).map_or("Unknown".to_string(), |b| b.to_string()));
    /// assert_eq!("Unknown", none::<usize>().map_or("Unknown".to_string(), |b| b.to_string()));
    /// ```
    #[inline]
    pub fn map_or<U, F>(self, default: U, f: F) -> U
    where
        F: FnOnce(T) -> U,
    {
        if self.is_none() {
            default
        } else {
            f(self.value)
        }
    }
    /// Maps a value to a `U` by applying the function or return a computed
    /// default.
    ///
    /// # Examples
    /// ```
    ///# use optional::{some, none};
    /// assert_eq!("1", some(1usize).map_or_else(|| "Unknown".to_string(),
    /// |b| b.to_string()));
    /// assert_eq!("Unknown", none::<usize>().map_or_else(
    /// || "Unknown".to_string(), |b| b.to_string()));
    /// ```
    #[inline]
    pub fn map_or_else<U, D, F>(self, default: D, f: F) -> U
    where
        D: FnOnce() -> U,
        F: FnOnce(T) -> U,
    {
        if self.is_none() {
            default()
        } else {
            f(self.value)
        }
    }
    /// Returns this option if it contains a value, otherwise returns the other.
    ///
    /// Arguments passed to `or` are eagerly evaluated;
    /// if you are passing the result of a function call,
    /// it is recommended to use `or_else`, which is lazily evaluated.
    ///
    /// # Examples
    ///
    /// ```
    /// # use optional::{Optioned, some, none};
    /// let x = some(2);
    /// let y = none();
    /// assert_eq!(x.or(y), some(2));
    ///
    /// let x = none();
    /// let y = some(100);
    /// assert_eq!(x.or(y), some(100));
    ///
    /// let x = some(2);
    /// let y = some(100);
    /// assert_eq!(x.or(y), some(2));
    ///
    /// let x: Optioned<u32> = none();
    /// let y = none();
    /// assert_eq!(x.or(y), none());
    /// ```
    #[inline]
    pub fn or(self, other: Optioned<T>) -> Optioned<T> {
        if self.is_some() {
            self
        } else {
            other
        }
    }
    /// Returns this option if it contains a value, otherwise calls `f` and returns the result.
    ///
    /// # Examples
    ///
    /// ```
    /// # use optional::{Optioned, some, none};
    /// fn nothing() -> Optioned<u32> { none() }
    /// fn something() -> Optioned<u32> { some(1) }
    ///
    /// assert_eq!(some(2).or_else(something), some(2));
    /// assert_eq!(none().or_else(something), some(1));
    /// assert_eq!(none().or_else(nothing), none());
    /// ```
    pub fn or_else<F>(self, f: F) -> Optioned<T>
    where
        F: FnOnce() -> Optioned<T>,
    {
        if self.is_some() {
            self
        } else {
            f()
        }
    }
    /// Returns the `None` value for type `U` if this value or `other` contains their respective
    /// `None` values. Otherwise returns the `other` `Optioned` struct.
    ///
    /// # Examples
    ///
    /// ```
    /// # use optional::{Optioned, some, none};
    /// let the_other = some::<u32>(42);
    ///
    /// assert_eq!(some('a').and(the_other), some(42));
    /// assert_eq!(none::<char>().and(the_other), none::<u32>());
    /// assert_eq!(some('a').and(none::<u32>()), none::<u32>());
    /// assert_eq!(none::<char>().and(none::<u32>()), none::<u32>());
    /// ```
    #[inline]
    pub fn and<U>(self, other: Optioned<U>) -> Optioned<U>
    where
        U: Noned + Copy
    {
        if self.is_some() {
            other
        } else {
            none::<U>()
        }
    }
    /// Returns this `Optioned` if it contains the the `None` value, otherwise calls `f` with
    /// the contained value and returns the result as an `Optioned<U>`.
    ///
    /// # Examples
    ///
    /// ```
    /// # use optional::{Optioned, some, none, wrap};
    /// fn nothing() -> Optioned<u32> { none() }
    /// fn something() -> Optioned<u32> { some(1) }
    /// fn add_two(val: u32) -> Optioned<u32> {
    ///     wrap( val + 2)
    /// }
    ///
    /// fn failed_function(val: u32) -> Optioned<u32> {
    ///     none()
    /// }
    ///
    /// assert_eq!(some(2).and_then(add_two), some(4));
    /// assert_eq!(none().and_then(add_two), none());
    /// assert_eq!(some(2).and_then(failed_function), none());
    /// assert_eq!(none().and_then(failed_function), none());
    /// ```
    #[inline]
    pub fn and_then<F,U>(self, f: F) -> Optioned<U>
    where
        F: FnOnce(T) -> Optioned<U>,
        U: Noned + Copy
    {
        if self.is_some() {
            f(self.value)
        } else {
            none()
        }
    }
    /// Takes the value out of the `Optioned` and returns it as
    /// `Option<T>`, changing self to `None`.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::{some, none};
    /// let mut x = some(1u8);
    /// assert_eq!(Some(1u8), x.take());
    /// assert!(x.is_none());
    /// ```
    #[inline]
    pub fn take(&mut self) -> Option<T> {
        mem::replace(self, Self::none()).as_option()
    }
    /// Return a possibly empty slice over the contained value, if any.
    ///
    /// # Examples
    /// ```
    ///# use optional::{some, none};
    /// assert_eq!(&[42], some(42u8).as_slice());
    /// assert!(none::<i16>().as_slice().is_empty());
    /// ```
    #[inline]
    pub fn as_slice(&self) -> &[T] {
        slice_of_up_to_one::slice_of(&self.value, self.is_some())
    }
    /// return an iterator over all contained (that is zero or one) values.
    ///
    /// # Examples
    ///
    /// ```
    ///# use optional::{some, none};
    /// assert_eq!(None, none::<u64>().iter().next());
    /// assert_eq!(Some(42u64), some(42u64).iter().next());
    /// ```
    #[inline]
    pub fn iter(&self) -> OptionedIter<T> {
        OptionedIter { o: *self } // make a copy
    }
}
/// Create an `Optioned<T>` that is `some(t)`.
///
/// # Panics
///
/// panics if the supplied value is the None value
///
/// # Examples
///
/// ```
///# use ::optional::some;
/// some(1i32); // Optioned(1i32)
/// ```
///
/// ```should_panic
///# use ::optional::some;
/// some(std::f64::NAN); // panic!s
/// ```
pub fn some<T: Noned + Copy>(value: T) -> Optioned<T> {
    Optioned::<T>::some(value)
}
/// Create a `None Optioned<T>`. Note that the type must be inferable
/// from the context, or you'd need to call with `::<T>` where `T` is
/// the specific type.
///
/// # Examples
///
/// ```
///# use ::optional::{none, Optioned};
/// let x : Optioned<i16> = none();
/// none::<f32>();
/// ```
pub fn none<T: Noned + Copy>() -> Optioned<T> {
    Optioned::<T>::none()
}
/// Wrap a `T` into an `Optioned<T>`, regardless of its None-ness.
///
/// # Examples
///
/// ```
///# use optional::wrap;
/// assert!(wrap(1u8).is_some());
/// assert!(wrap(255u8).is_none());
/// ```
pub fn wrap<T: Noned + Copy>(v: T) -> Optioned<T> {
    // No assertion here: unlike `some`, a sentinel input just yields `none`.
    Optioned { value: v }
}
impl<T: Noned + Copy + Debug> Debug for Optioned<T> {
    /// Formats as `Some(<value>)` or `None`, mirroring `Option`'s output.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), Error> {
        match self.as_option() {
            Some(ref v) => write!(f, "Some({:?})", v),
            None => write!(f, "None"),
        }
    }
}
// The default `Optioned` is `none()`, matching `Option`'s `Default`.
impl<T: Noned + Copy> Default for Optioned<T> {
    #[inline]
    fn default() -> Optioned<T> {
        none()
    }
}
/// iterate over an Optioned<T>
#[derive(Copy, Clone)]
pub struct OptionedIter<T: Noned + Copy> {
    // Remaining state: `take()` in `next` empties it after the first yield.
    o: Optioned<T>,
}
impl<T: Noned + Copy> Iterator for OptionedIter<T> {
    type Item = T;
    #[inline]
    fn next(&mut self) -> Option<T> {
        self.o.take()
    }
}
impl<'a, T: Noned + Copy> From<&'a Option<T>> for Optioned<T> {
    // NOTE: panics (via `Optioned::some`) if the contained value is the
    // `None` sentinel of `T`.
    #[inline]
    fn from(o: &Option<T>) -> Optioned<T> {
        o.map_or_else(Self::none, Self::some)
    }
}
impl<T: Noned + Copy> From<Option<T>> for Optioned<T> {
    // NOTE: panics (via `Optioned::some`) if the contained value is the
    // `None` sentinel of `T`.
    #[inline]
    fn from(o: Option<T>) -> Optioned<T> {
        o.map_or_else(Self::none, Self::some)
    }
}
impl<T: Noned + Copy> Into<Option<T>> for Optioned<T> {
    #[inline]
    fn into(self) -> Option<T> {
        self.as_option()
    }
}
impl<T: Noned + Copy> From<T> for Optioned<T> {
    // Unlike the `Option` conversions above, this never panics: a sentinel
    // input simply becomes `none()` (see `wrap`).
    #[inline]
    fn from(o: T) -> Optioned<T> {
        wrap(o)
    }
}
|
//! SocketCAN support.
//!
//! The Linux kernel supports using CAN-devices through a network-like API
//! (see https://www.kernel.org/doc/Documentation/networking/can.txt). This
//! crate allows easy access to this functionality without having to wrestle
//! libc calls.
//!
//! # An introduction to CAN
//!
//! The CAN bus was originally designed to allow microcontrollers inside a
//! vehicle to communicate over a single shared bus. Messages called
//! *frames* are multicast to all devices on the bus.
//!
//! Every frame consists of an ID and a payload of up to 8 bytes. If two
//! devices attempt to send a frame at the same time, the device with the
//! higher ID will notice the conflict, stop sending and reattempt to send its
//! frame in the next time slot. This means that the lower the ID, the higher
//! the priority. Since most devices have a limited buffer for outgoing frames,
//! a single device with a high priority (== low ID) can block communication
//! on that bus by sending messages too fast.
//!
//! The Linux socketcan subsystem makes the CAN bus available as a regular
//! networking device. Opening a network interface allows receiving all CAN
//! messages received on it. A device CAN be opened multiple times, every
//! client will receive all CAN frames simultaneously.
//!
//! Similarly, CAN frames can be sent to the bus by multiple clients
//! simultaneously as well.
//!
//! # Hardware and more information
//!
//! More information on CAN [can be found on Wikipedia](https://en.wikipedia.org/wiki/CAN_bus). When not running on
//! an embedded platform with already integrated CAN components,
//! [Thomas Fischl's USBtin](http://www.fischl.de/usbtin/) (see
//! [section 2.4](http://www.fischl.de/usbtin/#socketcan)) is one of many ways
//! to get started.
// clippy: do not warn about things like "SocketCAN" inside the docs
#![cfg_attr(feature = "cargo-clippy", allow(doc_markdown))]
extern crate hex;
extern crate itertools;
extern crate libc;
extern crate nix;
extern crate try_from;
mod err;
pub use err::{CanError, CanErrorDecodingFailure};
pub mod dump;
mod util;
#[cfg(test)]
mod tests;
use libc::{c_int, c_short, c_void, c_uint, c_ulong, socket, SOCK_RAW, close, bind, sockaddr, read,
write, SOL_SOCKET, SO_RCVTIMEO, timespec, timeval, EINPROGRESS, SO_SNDTIMEO};
use itertools::Itertools;
use nix::net::if_::if_nametoindex;
use std::{error, fmt, io, time};
use std::mem::{size_of, uninitialized};
use util::{set_socket_option, set_socket_option_mult};
/// Check an error return value for timeouts.
///
/// Due to the fact that timeouts are reported as errors, calling `read_frame`
/// on a socket with a timeout that does not receive a frame in time will
/// result in an error being returned. This trait adds a `should_retry` method
/// to `Error` and `Result` to check for this condition.
pub trait ShouldRetry {
    /// Check for timeout
    ///
    /// If `true`, the error is probably due to a timeout
    /// (EAGAIN/EWOULDBLOCK/EINPROGRESS — see the `io::Error` impl).
    fn should_retry(&self) -> bool;
}
impl ShouldRetry for io::Error {
    fn should_retry(&self) -> bool {
        match self.kind() {
            // EAGAIN, EINPROGRESS and EWOULDBLOCK are the three possible codes
            // returned when a timeout occurs. the stdlib already maps EAGAIN
            // and EWOULDBLOCK as WouldBlock
            io::ErrorKind::WouldBlock => true,
            // however, EINPROGRESS is also valid; it is mapped to `Other`,
            // so inspect the raw OS error code directly
            io::ErrorKind::Other => {
                if let Some(i) = self.raw_os_error() {
                    i == EINPROGRESS
                } else {
                    false
                }
            }
            _ => false,
        }
    }
}
impl<E: fmt::Debug> ShouldRetry for io::Result<E> {
    /// A `Result` should be retried exactly when it is an `Err` whose
    /// contained `io::Error` says so; an `Ok` never needs a retry.
    fn should_retry(&self) -> bool {
        match *self {
            Err(ref e) => e.should_retry(),
            Ok(_) => false,
        }
    }
}
// constants stolen from C headers (linux/can.h and friends)
const AF_CAN: c_int = 29;
const PF_CAN: c_int = 29;
const CAN_RAW: c_int = 1;
const SOL_CAN_BASE: c_int = 100;
const SOL_CAN_RAW: c_int = SOL_CAN_BASE + CAN_RAW;
// setsockopt option names on the SOL_CAN_RAW level
const CAN_RAW_FILTER: c_int = 1;
const CAN_RAW_ERR_FILTER: c_int = 2;
const CAN_RAW_LOOPBACK: c_int = 3;
const CAN_RAW_RECV_OWN_MSGS: c_int = 4;
// unused:
// const CAN_RAW_FD_FRAMES: c_int = 5;
const CAN_RAW_JOIN_FILTERS: c_int = 6;
// get timestamp in a struct timeval (us accuracy)
// const SIOCGSTAMP: c_int = 0x8906;
// get timestamp in a struct timespec (ns accuracy)
const SIOCGSTAMPNS: c_int = 0x8907;
/// if set, indicate 29 bit extended format
pub const EFF_FLAG: u32 = 0x80000000;
/// remote transmission request flag
pub const RTR_FLAG: u32 = 0x40000000;
/// error flag
pub const ERR_FLAG: u32 = 0x20000000;
/// valid bits in standard frame id
pub const SFF_MASK: u32 = 0x000007ff;
/// valid bits in extended frame id
pub const EFF_MASK: u32 = 0x1fffffff;
/// valid bits in error frame
pub const ERR_MASK: u32 = 0x1fffffff;
/// an error mask that will cause SocketCAN to report all errors
pub const ERR_MASK_ALL: u32 = ERR_MASK;
/// an error mask that will cause SocketCAN to silently drop all errors
pub const ERR_MASK_NONE: u32 = 0;
// `timeval`'s fields are target-dependent (`i64` on 64-bit targets, `i32` on
// 32-bit ones), hence the two `cfg` variants of this conversion helper.
#[cfg(target_pointer_width = "64")]
fn c_timeval_new(t: time::Duration) -> timeval {
    timeval {
        tv_sec: t.as_secs() as i64,
        // timeval carries microseconds: truncate the nanosecond remainder
        tv_usec: (t.subsec_nanos() / 1000) as i64,
    }
}
#[cfg(target_pointer_width = "32")]
fn c_timeval_new(t: time::Duration) -> timeval {
    timeval {
        tv_sec: t.as_secs() as i32,
        tv_usec: (t.subsec_nanos() / 1000) as i32,
    }
}
// Mirrors the kernel's `sockaddr_can`; must stay `repr(C)` because it is
// passed to `bind(2)` by pointer.
#[derive(Debug)]
#[repr(C)]
struct CanAddr {
    _af_can: c_short, // address family (always set to AF_CAN)
    if_index: c_int, // kernel interface index
    rx_id: u32,
    tx_id: u32,
}
#[derive(Debug)]
/// Errors opening socket
pub enum CanSocketOpenError {
    /// Device could not be found
    LookupError(nix::Error),
    /// System error while trying to look up device name
    IOError(io::Error),
}
impl fmt::Display for CanSocketOpenError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            CanSocketOpenError::LookupError(ref e) => write!(f, "CAN Device not found: {}", e),
            CanSocketOpenError::IOError(ref e) => write!(f, "IO: {}", e),
        }
    }
}
impl error::Error for CanSocketOpenError {
    fn description(&self) -> &str {
        match *self {
            CanSocketOpenError::LookupError(_) => "can device not found",
            CanSocketOpenError::IOError(ref e) => e.description(),
        }
    }
    // Expose the wrapped nix/io error as the underlying cause.
    fn cause(&self) -> Option<&error::Error> {
        match *self {
            CanSocketOpenError::LookupError(ref e) => Some(e),
            CanSocketOpenError::IOError(ref e) => Some(e),
        }
    }
}
#[derive(Debug, Copy, Clone)]
/// Error that occurs when creating CAN packets
pub enum ConstructionError {
    /// CAN ID was outside the range of valid IDs
    IDTooLarge,
    /// More than 8 Bytes of payload data were passed in
    TooMuchData,
}
impl fmt::Display for ConstructionError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            ConstructionError::IDTooLarge => write!(f, "CAN ID too large"),
            ConstructionError::TooMuchData => {
                write!(f, "Payload is larger than CAN maximum of 8 bytes")
            }
        }
    }
}
impl error::Error for ConstructionError {
    fn description(&self) -> &str {
        match *self {
            ConstructionError::IDTooLarge => "can id too large",
            ConstructionError::TooMuchData => "too much data",
        }
    }
}
// Allow `?` on `if_nametoindex` results inside `CanSocket::open`.
impl From<nix::Error> for CanSocketOpenError {
    fn from(e: nix::Error) -> CanSocketOpenError {
        CanSocketOpenError::LookupError(e)
    }
}
// Allow `?`/`from` on raw OS errors from socket()/bind().
impl From<io::Error> for CanSocketOpenError {
    fn from(e: io::Error) -> CanSocketOpenError {
        CanSocketOpenError::IOError(e)
    }
}
/// A socket for a CAN device.
///
/// Will be closed upon deallocation. To close manually, drop the value
/// (e.g. via `std::mem::drop`).
/// Internally this is just a wrapped file-descriptor.
#[derive(Debug)]
pub struct CanSocket {
    fd: c_int,
}
impl CanSocket {
    /// Open a named CAN device.
    ///
    /// Usually the more common case, opens a socket can device by name, such
    /// as "vcan0" or "socan0".
    pub fn open(ifname: &str) -> Result<CanSocket, CanSocketOpenError> {
        let if_index = if_nametoindex(ifname)?;
        CanSocket::open_if(if_index)
    }
    /// Open CAN device by interface number.
    ///
    /// Opens a CAN device by kernel interface number.
    pub fn open_if(if_index: c_uint) -> Result<CanSocket, CanSocketOpenError> {
        let addr = CanAddr {
            _af_can: AF_CAN as c_short,
            if_index: if_index as c_int,
            rx_id: 0, // ?
            tx_id: 0, // ?
        };
        // open socket
        let sock_fd;
        unsafe {
            sock_fd = socket(PF_CAN, SOCK_RAW, CAN_RAW);
        }
        if sock_fd == -1 {
            return Err(CanSocketOpenError::from(io::Error::last_os_error()));
        }
        // bind it
        let bind_rv;
        unsafe {
            let sockaddr_ptr = &addr as *const CanAddr;
            bind_rv = bind(sock_fd,
                           sockaddr_ptr as *const sockaddr,
                           size_of::<CanAddr>() as u32);
        }
        // on bind failure, close the socket so the fd is not leaked
        if bind_rv == -1 {
            let e = io::Error::last_os_error();
            unsafe {
                close(sock_fd);
            }
            return Err(CanSocketOpenError::from(e));
        }
        Ok(CanSocket { fd: sock_fd })
    }
    // Close the underlying fd; called from `Drop`.
    fn close(&mut self) -> io::Result<()> {
        unsafe {
            let rv = close(self.fd);
            // close(2) returns -1 on error. The previous check (`rv != -1`)
            // was inverted: it reported an error on success and swallowed
            // real failures.
            if rv == -1 {
                return Err(io::Error::last_os_error());
            }
        }
        Ok(())
    }
    /// Sets the read timeout on the socket
    ///
    /// For convenience, the result value can be checked using
    /// `ShouldRetry::should_retry` when a timeout is set.
    pub fn set_read_timeout(&self, duration: time::Duration) -> io::Result<()> {
        set_socket_option(self.fd, SOL_SOCKET, SO_RCVTIMEO, &c_timeval_new(duration))
    }
    /// Sets the write timeout on the socket
    pub fn set_write_timeout(&self, duration: time::Duration) -> io::Result<()> {
        set_socket_option(self.fd, SOL_SOCKET, SO_SNDTIMEO, &c_timeval_new(duration))
    }
    /// Blocking read a single can frame.
    pub fn read_frame(&self) -> io::Result<CanFrame> {
        let mut frame = CanFrame {
            _id: 0,
            _data_len: 0,
            _pad: 0,
            _res0: 0,
            _res1: 0,
            _data: [0; 8],
        };
        let read_rv = unsafe {
            let frame_ptr = &mut frame as *mut CanFrame;
            read(self.fd, frame_ptr as *mut c_void, size_of::<CanFrame>())
        };
        // a short (or failed) read never yields a usable frame
        if read_rv as usize != size_of::<CanFrame>() {
            return Err(io::Error::last_os_error());
        }
        Ok(frame)
    }
    /// Blocking read a single can frame with timestamp
    ///
    /// Note that reading a frame and retrieving the timestamp requires two
    /// consecutive syscalls. To avoid race conditions, exclusive access
    /// to the socket is enforced by requiring a `&mut self`.
    pub fn read_frame_with_timestamp(&mut self) -> io::Result<(CanFrame, time::SystemTime)> {
        let frame = self.read_frame()?;
        // zero-initialize instead of `mem::uninitialized()` (deprecated and
        // UB-prone); the ioctl overwrites both fields on success
        let mut ts = timespec {
            tv_sec: 0,
            tv_nsec: 0,
        };
        let rval = unsafe {
            libc::ioctl(self.fd, SIOCGSTAMPNS as c_ulong, &mut ts as *mut timespec)
        };
        if rval == -1 {
            return Err(io::Error::last_os_error());
        }
        Ok((frame, util::system_time_from_timespec(ts)))
    }
    /// Write a single can frame.
    ///
    /// Note that this function can fail with an `EAGAIN` error or similar.
    /// Use `write_frame_insist` if you need to be sure that the message got
    /// sent or failed.
    pub fn write_frame(&self, frame: &CanFrame) -> io::Result<()> {
        // not a mutable reference needed (see std::net::UdpSocket) for
        // a comparison
        // debug!("Sending: {:?}", frame);
        let write_rv = unsafe {
            let frame_ptr = frame as *const CanFrame;
            write(self.fd, frame_ptr as *const c_void, size_of::<CanFrame>())
        };
        if write_rv as usize != size_of::<CanFrame>() {
            return Err(io::Error::last_os_error());
        }
        Ok(())
    }
    /// Blocking write a single can frame, retrying until it gets sent
    /// successfully.
    pub fn write_frame_insist(&self, frame: &CanFrame) -> io::Result<()> {
        loop {
            match self.write_frame(frame) {
                Ok(v) => return Ok(v),
                Err(e) => {
                    // only retry on timeout-like errors; anything else is fatal
                    if !e.should_retry() {
                        return Err(e);
                    }
                }
            }
        }
    }
    /// Sets filters on the socket.
    ///
    /// CAN packages received by SocketCAN are matched against these filters,
    /// only matching packets are returned by the interface.
    ///
    /// See `CanFilter` for details on how filtering works. By default, a
    /// single filter matching all incoming frames is installed.
    pub fn set_filters(&self, filters: &[CanFilter]) -> io::Result<()> {
        set_socket_option_mult(self.fd, SOL_CAN_RAW, CAN_RAW_FILTER, filters)
    }
    /// Sets the error mask on the socket.
    ///
    /// By default (`ERR_MASK_NONE`) no error conditions are reported as
    /// special error frames by the socket. Enabling error conditions by
    /// setting `ERR_MASK_ALL` or another non-empty error mask causes the
    /// socket to receive notification about the specified conditions.
    #[inline]
    pub fn set_error_mask(&self, mask: u32) -> io::Result<()> {
        set_socket_option(self.fd, SOL_CAN_RAW, CAN_RAW_ERR_FILTER, &mask)
    }
    /// Enable or disable loopback.
    ///
    /// By default, loopback is enabled, causing other applications that open
    /// the same CAN bus to see frames emitted by different applications on
    /// the same system.
    #[inline]
    pub fn set_loopback(&self, enabled: bool) -> io::Result<()> {
        let loopback: c_int = if enabled { 1 } else { 0 };
        set_socket_option(self.fd, SOL_CAN_RAW, CAN_RAW_LOOPBACK, &loopback)
    }
    /// Enable or disable receiving of own frames.
    ///
    /// When loopback is enabled, this settings controls if CAN frames sent
    /// are received back immediately by sender. Default is off.
    pub fn set_recv_own_msgs(&self, enabled: bool) -> io::Result<()> {
        let recv_own_msgs: c_int = if enabled { 1 } else { 0 };
        set_socket_option(self.fd, SOL_CAN_RAW, CAN_RAW_RECV_OWN_MSGS, &recv_own_msgs)
    }
    /// Enable or disable join filters.
    ///
    /// By default a frame is accepted if it matches any of the filters set
    /// with `set_filters`. If join filters is enabled, a frame has to match
    /// _all_ filters to be accepted.
    pub fn set_join_filters(&self, enabled: bool) -> io::Result<()> {
        let join_filters: c_int = if enabled { 1 } else { 0 };
        set_socket_option(self.fd, SOL_CAN_RAW, CAN_RAW_JOIN_FILTERS, &join_filters)
    }
}
impl Drop for CanSocket {
    fn drop(&mut self) {
        // ignore result: there is no meaningful way to handle a failed
        // close(2) during drop
        self.close().ok(); // ignore result
    }
}
/// CanFrame
///
/// Uses the same memory layout as the underlying kernel struct for performance
/// reasons (it is passed directly to `read(2)`/`write(2)`), hence `repr(C)`.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct CanFrame {
    /// 32 bit CAN_ID + EFF/RTR/ERR flags
    _id: u32,
    /// data length (0..=8). Bytes beyond are not valid
    _data_len: u8,
    /// padding
    _pad: u8,
    /// reserved
    _res0: u8,
    /// reserved
    _res1: u8,
    /// buffer for data
    _data: [u8; 8],
}
impl CanFrame {
    /// Construct a new CAN frame.
    ///
    /// `id` is the raw CAN id (up to 29 bits); ids wider than `SFF_MASK`
    /// automatically get the extended-frame flag (`EFF_FLAG`). `data` is the
    /// payload (at most 8 bytes); `rtr` and `err` set the remote-transmission
    /// request and error flags respectively.
    ///
    /// # Errors
    ///
    /// Returns `ConstructionError::TooMuchData` for payloads over 8 bytes and
    /// `ConstructionError::IDTooLarge` if `id` exceeds `EFF_MASK`.
    pub fn new(id: u32, data: &[u8], rtr: bool, err: bool) -> Result<CanFrame, ConstructionError> {
        if data.len() > 8 {
            return Err(ConstructionError::TooMuchData);
        }
        if id > EFF_MASK {
            return Err(ConstructionError::IDTooLarge);
        }
        // assemble the flag bits into the id word
        let mut flagged_id = id;
        // set EFF_FLAG on large message
        if id > SFF_MASK {
            flagged_id |= EFF_FLAG;
        }
        if rtr {
            flagged_id |= RTR_FLAG;
        }
        if err {
            flagged_id |= ERR_FLAG;
        }
        // copy the payload into the fixed 8-byte buffer; trailing bytes stay 0
        let mut full_data = [0; 8];
        full_data[..data.len()].copy_from_slice(data);
        Ok(CanFrame {
            _id: flagged_id,
            _data_len: data.len() as u8,
            _pad: 0,
            _res0: 0,
            _res1: 0,
            _data: full_data,
        })
    }
    /// Return the actual CAN ID (without EFF/RTR/ERR flags)
    #[inline]
    pub fn id(&self) -> u32 {
        if self.is_extended() {
            self._id & EFF_MASK
        } else {
            self._id & SFF_MASK
        }
    }
    /// Return the error message bits of the id
    #[inline]
    pub fn err(&self) -> u32 {
        self._id & ERR_MASK
    }
    /// Check if frame uses 29 bit extended frame format
    #[inline]
    pub fn is_extended(&self) -> bool {
        self._id & EFF_FLAG != 0
    }
    /// Check if frame is an error message
    #[inline]
    pub fn is_error(&self) -> bool {
        self._id & ERR_FLAG != 0
    }
    /// Check if frame is a remote transmission request
    #[inline]
    pub fn is_rtr(&self) -> bool {
        self._id & RTR_FLAG != 0
    }
    /// A slice into the actual data. Slice will always be <= 8 bytes in length
    #[inline]
    pub fn data(&self) -> &[u8] {
        &self._data[..(self._data_len as usize)]
    }
    /// Read error from message and transform it into a `CanError`.
    ///
    /// SocketCAN errors are indicated using the error bit and coded inside
    /// id and data payload. Calling `error()` converts these into usable
    /// `CanError` instances.
    ///
    /// If the frame is malformed, this may fail with a
    /// `CanErrorDecodingFailure`.
    #[inline]
    pub fn error(&self) -> Result<CanError, CanErrorDecodingFailure> {
        CanError::from_frame(self)
    }
}
impl fmt::UpperHex for CanFrame {
    /// Formats the frame as `<hex id>#<hex data>` (can-utils style); the
    /// alternate flag (`{:#X}`) separates data bytes with spaces.
    ///
    /// Streams directly into the formatter instead of allocating an
    /// intermediate `String` per payload byte (the previous
    /// `format!` + `Itertools::join` approach).
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        write!(f, "{:X}#", self.id())?;
        let sep = if f.alternate() { " " } else { "" };
        for (i, b) in self.data().iter().enumerate() {
            if i > 0 {
                f.write_str(sep)?;
            }
            write!(f, "{:02X}", b)?;
        }
        Ok(())
    }
}
/// CanFilter
///
/// Contains an internal id and mask. Packets are considered to be matched by
/// a filter if `received_id & mask == filter_id & mask` holds true.
///
/// `repr(C)` because filters are passed verbatim to `setsockopt` in
/// `CanSocket::set_filters`.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct CanFilter {
    _id: u32,
    _mask: u32,
}
impl CanFilter {
    /// Construct a new CAN filter.
    ///
    /// Currently infallible (always returns `Ok`); the `Result` return type
    /// is kept for API symmetry with `CanFrame::new` and future validation.
    pub fn new(id: u32, mask: u32) -> Result<CanFilter, ConstructionError> {
        Ok(CanFilter {
            _id: id,
            _mask: mask,
        })
    }
}
Typo.
//! SocketCAN support.
//!
//! The Linux kernel supports using CAN-devices through a network-like API
//! (see https://www.kernel.org/doc/Documentation/networking/can.txt). This
//! crate allows easy access to this functionality without having to wrestle
//! libc calls.
//!
//! # An introduction to CAN
//!
//! The CAN bus was originally designed to allow microcontrollers inside a
//! vehicle to communicate over a single shared bus. Messages called
//! *frames* are multicast to all devices on the bus.
//!
//! Every frame consists of an ID and a payload of up to 8 bytes. If two
//! devices attempt to send a frame at the same time, the device with the
//! higher ID will notice the conflict, stop sending and reattempt to send its
//! frame in the next time slot. This means that the lower the ID, the higher
//! the priority. Since most devices have a limited buffer for outgoing frames,
//! a single device with a high priority (== low ID) can block communication
//! on that bus by sending messages too fast.
//!
//! The Linux socketcan subsystem makes the CAN bus available as a regular
//! networking device. Opening a network interface allows receiving all CAN
//! messages received on it. A device CAN be opened multiple times, every
//! client will receive all CAN frames simultaneously.
//!
//! Similarly, CAN frames can be sent to the bus by multiple clients
//! simultaneously as well.
//!
//! # Hardware and more information
//!
//! More information on CAN [can be found on Wikipedia](https://en.wikipedia.org/wiki/CAN_bus). When not running on
//! an embedded platform with already integrated CAN components,
//! [Thomas Fischl's USBtin](http://www.fischl.de/usbtin/) (see
//! [section 2.4](http://www.fischl.de/usbtin/#socketcan)) is one of many ways
//! to get started.
// clippy: do not warn about things like "SocketCAN" inside the docs
#![cfg_attr(feature = "cargo-clippy", allow(doc_markdown))]
extern crate hex;
extern crate itertools;
extern crate libc;
extern crate nix;
extern crate try_from;
mod err;
pub use err::{CanError, CanErrorDecodingFailure};
pub mod dump;
mod util;
#[cfg(test)]
mod tests;
use libc::{c_int, c_short, c_void, c_uint, c_ulong, socket, SOCK_RAW, close, bind, sockaddr, read,
write, SOL_SOCKET, SO_RCVTIMEO, timespec, timeval, EINPROGRESS, SO_SNDTIMEO};
use itertools::Itertools;
use nix::net::if_::if_nametoindex;
use std::{error, fmt, io, time};
use std::mem::{size_of, uninitialized};
use util::{set_socket_option, set_socket_option_mult};
/// Check an error return value for timeouts.
///
/// Due to the fact that timeouts are reported as errors, calling `read_frame`
/// on a socket with a timeout that does not receive a frame in time will
/// result in an error being returned. This trait adds a `should_retry` method
/// to `Error` and `Result` to check for this condition.
pub trait ShouldRetry {
    /// Check for timeout
    ///
    /// If `true`, the error is probably due to a timeout and the operation
    /// can simply be retried.
    fn should_retry(&self) -> bool;
}
impl ShouldRetry for io::Error {
    /// Returns `true` for the three timeout error codes.
    ///
    /// EAGAIN, EINPROGRESS and EWOULDBLOCK are the possible codes returned
    /// when a timeout occurs; the stdlib already maps EAGAIN and EWOULDBLOCK
    /// to `WouldBlock`, while EINPROGRESS arrives as `Other`.
    fn should_retry(&self) -> bool {
        match self.kind() {
            io::ErrorKind::WouldBlock => true,
            io::ErrorKind::Other => self.raw_os_error() == Some(EINPROGRESS),
            _ => false,
        }
    }
}
impl<E: fmt::Debug> ShouldRetry for io::Result<E> {
    /// Delegates to the contained error; an `Ok` value never needs a retry.
    fn should_retry(&self) -> bool {
        match *self {
            Err(ref e) => e.should_retry(),
            Ok(_) => false,
        }
    }
}
// constants stolen from C headers
// (address family / protocol numbers; presumably from linux/can.h — TODO confirm)
const AF_CAN: c_int = 29;
const PF_CAN: c_int = 29;
const CAN_RAW: c_int = 1;
// socket-option level for raw CAN sockets: SOL_CAN_BASE + protocol
const SOL_CAN_BASE: c_int = 100;
const SOL_CAN_RAW: c_int = SOL_CAN_BASE + CAN_RAW;
// socket-option names used with SOL_CAN_RAW below
const CAN_RAW_FILTER: c_int = 1;
const CAN_RAW_ERR_FILTER: c_int = 2;
const CAN_RAW_LOOPBACK: c_int = 3;
const CAN_RAW_RECV_OWN_MSGS: c_int = 4;
// unused:
// const CAN_RAW_FD_FRAMES: c_int = 5;
const CAN_RAW_JOIN_FILTERS: c_int = 6;
// get timestamp in a struct timeval (us accuracy)
// const SIOCGSTAMP: c_int = 0x8906;
// get timestamp in a struct timespec (ns accuracy)
const SIOCGSTAMPNS: c_int = 0x8907;
/// if set, indicate 29 bit extended format
pub const EFF_FLAG: u32 = 0x80000000;
/// remote transmission request flag
pub const RTR_FLAG: u32 = 0x40000000;
/// error flag
pub const ERR_FLAG: u32 = 0x20000000;
/// valid bits in standard frame id
pub const SFF_MASK: u32 = 0x000007ff;
/// valid bits in extended frame id
pub const EFF_MASK: u32 = 0x1fffffff;
/// valid bits in error frame
pub const ERR_MASK: u32 = 0x1fffffff;
/// an error mask that will cause SocketCAN to report all errors
pub const ERR_MASK_ALL: u32 = ERR_MASK;
/// an error mask that will cause SocketCAN to silently drop all errors
pub const ERR_MASK_NONE: u32 = 0;
// Convert a std `Duration` into a C `timeval` (64 bit field variant).
#[cfg(target_pointer_width = "64")]
fn c_timeval_new(t: time::Duration) -> timeval {
    timeval {
        tv_sec: t.as_secs() as i64,
        // timeval carries microseconds; Duration exposes nanoseconds
        tv_usec: (t.subsec_nanos() / 1000) as i64,
    }
}
// Convert a std `Duration` into a C `timeval` (32 bit field variant).
// NOTE(review): the `as i32` cast truncates durations beyond i32::MAX
// seconds; acceptable for socket timeouts, but worth knowing.
#[cfg(target_pointer_width = "32")]
fn c_timeval_new(t: time::Duration) -> timeval {
    timeval {
        tv_sec: t.as_secs() as i32,
        tv_usec: (t.subsec_nanos() / 1000) as i32,
    }
}
// Mirror of the kernel's CAN socket address structure, passed to bind(2).
#[derive(Debug)]
#[repr(C)]
struct CanAddr {
    _af_can: c_short, // address family (set to AF_CAN)
    if_index: c_int, // kernel interface index
    // rx_id/tx_id: always zeroed by this crate (see `CanSocket::open_if`)
    rx_id: u32,
    tx_id: u32,
}
#[derive(Debug)]
/// Errors opening socket
pub enum CanSocketOpenError {
    /// Device could not be found (interface-name lookup via `if_nametoindex`)
    LookupError(nix::Error),
    /// System error while trying to look up device name
    IOError(io::Error),
}
impl fmt::Display for CanSocketOpenError {
    /// Render the error, including the wrapped lower-level error.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            CanSocketOpenError::IOError(ref e) => write!(f, "IO: {}", e),
            CanSocketOpenError::LookupError(ref e) => write!(f, "CAN Device not found: {}", e),
        }
    }
}
impl error::Error for CanSocketOpenError {
    // NOTE(review): `description`/`cause` are the older std::error::Error
    // API; kept as-is, presumably for compatibility with the toolchain
    // this crate targets.
    fn description(&self) -> &str {
        match *self {
            CanSocketOpenError::LookupError(_) => "can device not found",
            CanSocketOpenError::IOError(ref e) => e.description(),
        }
    }
    // Expose the wrapped lower-level error as the cause in both cases.
    fn cause(&self) -> Option<&error::Error> {
        match *self {
            CanSocketOpenError::LookupError(ref e) => Some(e),
            CanSocketOpenError::IOError(ref e) => Some(e),
        }
    }
}
#[derive(Debug, Copy, Clone)]
/// Error that occurs when creating CAN packets
pub enum ConstructionError {
    /// CAN ID was outside the range of valid IDs (> EFF_MASK, 29 bits)
    IDTooLarge,
    /// More than 8 Bytes of payload data were passed in
    TooMuchData,
}
impl fmt::Display for ConstructionError {
    /// Human-readable description of the construction failure.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let msg = match *self {
            ConstructionError::IDTooLarge => "CAN ID too large",
            ConstructionError::TooMuchData => "Payload is larger than CAN maximum of 8 bytes",
        };
        write!(f, "{}", msg)
    }
}
impl error::Error for ConstructionError {
    /// Short lowercase summary, per the older `Error::description` convention.
    fn description(&self) -> &str {
        match *self {
            ConstructionError::TooMuchData => "too much data",
            ConstructionError::IDTooLarge => "can id too large",
        }
    }
}
impl From<nix::Error> for CanSocketOpenError {
    // Interface-name lookup failures surface as `LookupError`.
    fn from(e: nix::Error) -> CanSocketOpenError {
        CanSocketOpenError::LookupError(e)
    }
}
impl From<io::Error> for CanSocketOpenError {
    // Raw OS errors (socket/bind failures) surface as `IOError`.
    fn from(e: io::Error) -> CanSocketOpenError {
        CanSocketOpenError::IOError(e)
    }
}
/// A socket for a CAN device.
///
/// Will be closed upon deallocation. To close manually, use
/// `std::mem::drop`. Internally this is just a wrapped file-descriptor.
#[derive(Debug)]
pub struct CanSocket {
    // raw file descriptor of the underlying CAN_RAW socket
    fd: c_int,
}
impl CanSocket {
    /// Open a named CAN device.
    ///
    /// Usually the more common case, opens a socket can device by name, such
    /// as "vcan0" or "socan0".
    pub fn open(ifname: &str) -> Result<CanSocket, CanSocketOpenError> {
        let if_index = if_nametoindex(ifname)?;
        CanSocket::open_if(if_index)
    }

    /// Open CAN device by interface number.
    ///
    /// Opens a CAN device by kernel interface number.
    pub fn open_if(if_index: c_uint) -> Result<CanSocket, CanSocketOpenError> {
        let addr = CanAddr {
            _af_can: AF_CAN as c_short,
            if_index: if_index as c_int,
            rx_id: 0, // unused by CAN_RAW sockets
            tx_id: 0, // unused by CAN_RAW sockets
        };

        // create the raw CAN socket
        let sock_fd;
        unsafe {
            sock_fd = socket(PF_CAN, SOCK_RAW, CAN_RAW);
        }

        if sock_fd == -1 {
            return Err(CanSocketOpenError::from(io::Error::last_os_error()));
        }

        // bind it to the requested interface
        let bind_rv;
        unsafe {
            let sockaddr_ptr = &addr as *const CanAddr;
            bind_rv = bind(sock_fd,
                           sockaddr_ptr as *const sockaddr,
                           size_of::<CanAddr>() as u32);
        }

        // on bind failure, close the socket so the fd is not leaked
        if bind_rv == -1 {
            let e = io::Error::last_os_error();
            unsafe {
                close(sock_fd);
            }
            return Err(CanSocketOpenError::from(e));
        }

        Ok(CanSocket { fd: sock_fd })
    }

    /// Close the underlying file descriptor, reporting any OS error.
    fn close(&mut self) -> io::Result<()> {
        unsafe {
            let rv = close(self.fd);
            // BUGFIX: close(2) returns -1 on failure and 0 on success. The
            // previous check (`rv != -1`) reported an error for every
            // *successful* close and swallowed actual failures.
            if rv == -1 {
                return Err(io::Error::last_os_error());
            }
        }
        Ok(())
    }

    /// Sets the read timeout on the socket
    ///
    /// For convenience, the result value can be checked using
    /// `ShouldRetry::should_retry` when a timeout is set.
    pub fn set_read_timeout(&self, duration: time::Duration) -> io::Result<()> {
        set_socket_option(self.fd, SOL_SOCKET, SO_RCVTIMEO, &c_timeval_new(duration))
    }

    /// Sets the write timeout on the socket
    pub fn set_write_timeout(&self, duration: time::Duration) -> io::Result<()> {
        set_socket_option(self.fd, SOL_SOCKET, SO_SNDTIMEO, &c_timeval_new(duration))
    }

    /// Blocking read a single can frame.
    pub fn read_frame(&self) -> io::Result<CanFrame> {
        let mut frame = CanFrame {
            _id: 0,
            _data_len: 0,
            _pad: 0,
            _res0: 0,
            _res1: 0,
            _data: [0; 8],
        };

        let read_rv = unsafe {
            let frame_ptr = &mut frame as *mut CanFrame;
            read(self.fd, frame_ptr as *mut c_void, size_of::<CanFrame>())
        };

        // anything but a full frame (including timeouts reported as -1)
        // is an error
        if read_rv as usize != size_of::<CanFrame>() {
            return Err(io::Error::last_os_error());
        }

        Ok(frame)
    }

    /// Blocking read a single can frame with timestamp
    ///
    /// Note that reading a frame and retrieving the timestamp requires two
    /// consecutive syscalls. To avoid race conditions, exclusive access
    /// to the socket is enforced by requiring a `&mut self`.
    pub fn read_frame_with_timestamp(&mut self) -> io::Result<(CanFrame, time::SystemTime)> {
        let frame = self.read_frame()?;

        // zero-initialize instead of the deprecated `mem::uninitialized`;
        // the ioctl below overwrites both fields on success
        let mut ts = timespec {
            tv_sec: 0,
            tv_nsec: 0,
        };
        let rval = unsafe {
            libc::ioctl(self.fd, SIOCGSTAMPNS as c_ulong, &mut ts as *mut timespec)
        };

        if rval == -1 {
            return Err(io::Error::last_os_error());
        }

        Ok((frame, util::system_time_from_timespec(ts)))
    }

    /// Write a single can frame.
    ///
    /// Note that this function can fail with an `EAGAIN` error or similar.
    /// Use `write_frame_insist` if you need to be sure that the message got
    /// sent or failed.
    pub fn write_frame(&self, frame: &CanFrame) -> io::Result<()> {
        // not a mutable reference needed (see std::net::UdpSocket) for
        // a comparison
        // debug!("Sending: {:?}", frame);

        let write_rv = unsafe {
            let frame_ptr = frame as *const CanFrame;
            write(self.fd, frame_ptr as *const c_void, size_of::<CanFrame>())
        };

        if write_rv as usize != size_of::<CanFrame>() {
            return Err(io::Error::last_os_error());
        }

        Ok(())
    }

    /// Blocking write a single can frame, retrying until it gets sent
    /// successfully.
    pub fn write_frame_insist(&self, frame: &CanFrame) -> io::Result<()> {
        loop {
            match self.write_frame(frame) {
                Ok(v) => return Ok(v),
                Err(e) => {
                    // only retry on timeout-like errors; anything else is
                    // propagated to the caller
                    if !e.should_retry() {
                        return Err(e);
                    }
                }
            }
        }
    }

    /// Sets filters on the socket.
    ///
    /// CAN packages received by SocketCAN are matched against these filters,
    /// only matching packets are returned by the interface.
    ///
    /// See `CanFilter` for details on how filtering works. By default, all
    /// single filter matching all incoming frames is installed.
    pub fn set_filters(&self, filters: &[CanFilter]) -> io::Result<()> {
        set_socket_option_mult(self.fd, SOL_CAN_RAW, CAN_RAW_FILTER, filters)
    }

    /// Sets the error mask on the socket.
    ///
    /// By default (`ERR_MASK_NONE`) no error conditions are reported as
    /// special error frames by the socket. Enabling error conditions by
    /// setting `ERR_MASK_ALL` or another non-empty error mask causes the
    /// socket to receive notification about the specified conditions.
    #[inline]
    pub fn set_error_mask(&self, mask: u32) -> io::Result<()> {
        set_socket_option(self.fd, SOL_CAN_RAW, CAN_RAW_ERR_FILTER, &mask)
    }

    /// Enable or disable loopback.
    ///
    /// By default, loopback is enabled, causing other applications that open
    /// the same CAN bus to see frames emitted by different applications on
    /// the same system.
    #[inline]
    pub fn set_loopback(&self, enabled: bool) -> io::Result<()> {
        let loopback: c_int = if enabled { 1 } else { 0 };
        set_socket_option(self.fd, SOL_CAN_RAW, CAN_RAW_LOOPBACK, &loopback)
    }

    /// Enable or disable receiving of own frames.
    ///
    /// When loopback is enabled, this settings controls if CAN frames sent
    /// are received back immediately by sender. Default is off.
    pub fn set_recv_own_msgs(&self, enabled: bool) -> io::Result<()> {
        let recv_own_msgs: c_int = if enabled { 1 } else { 0 };
        set_socket_option(self.fd, SOL_CAN_RAW, CAN_RAW_RECV_OWN_MSGS, &recv_own_msgs)
    }

    /// Enable or disable join filters.
    ///
    /// By default a frame is accepted if it matches any of the filters set
    /// with `set_filters`. If join filters is enabled, a frame has to match
    /// _all_ filters to be accepted.
    pub fn set_join_filters(&self, enabled: bool) -> io::Result<()> {
        let join_filters: c_int = if enabled { 1 } else { 0 };
        set_socket_option(self.fd, SOL_CAN_RAW, CAN_RAW_JOIN_FILTERS, &join_filters)
    }
}
impl Drop for CanSocket {
    // Best-effort close; errors cannot be propagated out of drop.
    fn drop(&mut self) {
        self.close().ok(); // ignore result
    }
}
/// CanFrame
///
/// Uses the same memory layout as the underlying kernel struct for
/// performance reasons (`#[repr(C)]` — read/written directly via the
/// socket file descriptor).
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct CanFrame {
    /// 32 bit CAN_ID + EFF/RTR/ERR flags
    _id: u32,
    /// data length. Bytes beyond are not valid
    _data_len: u8,
    /// padding
    _pad: u8,
    /// reserved
    _res0: u8,
    /// reserved
    _res1: u8,
    /// buffer for data
    _data: [u8; 8],
}
impl CanFrame {
    /// Construct a new CAN frame.
    ///
    /// `id` is the raw CAN ID (up to 29 bits), `data` the payload of at
    /// most 8 bytes. `rtr` marks the frame as a remote transmission
    /// request, `err` as an error frame.
    ///
    /// Fails with `ConstructionError::TooMuchData` for payloads over 8
    /// bytes and `ConstructionError::IDTooLarge` for IDs above `EFF_MASK`.
    pub fn new(id: u32, data: &[u8], rtr: bool, err: bool) -> Result<CanFrame, ConstructionError> {
        // validate before assembling anything
        if data.len() > 8 {
            return Err(ConstructionError::TooMuchData);
        }

        if id > EFF_MASK {
            return Err(ConstructionError::IDTooLarge);
        }

        let mut _id = id;

        // set EFF_FLAG on large message: IDs beyond the 11 bit standard
        // range need the extended frame format flag
        if id > SFF_MASK {
            _id |= EFF_FLAG;
        }

        if rtr {
            _id |= RTR_FLAG;
        }

        if err {
            _id |= ERR_FLAG;
        }

        // copy the payload into the fixed 8 byte buffer (replaces the old
        // manual element-by-element loop)
        let mut full_data = [0; 8];
        full_data[..data.len()].copy_from_slice(data);

        Ok(CanFrame {
            _id: _id,
            _data_len: data.len() as u8,
            _pad: 0,
            _res0: 0,
            _res1: 0,
            _data: full_data,
        })
    }

    /// Return the actual CAN ID (without EFF/RTR/ERR flags)
    #[inline]
    pub fn id(&self) -> u32 {
        if self.is_extended() {
            self._id & EFF_MASK
        } else {
            self._id & SFF_MASK
        }
    }

    /// Return the error message
    #[inline]
    pub fn err(&self) -> u32 {
        self._id & ERR_MASK
    }

    /// Check if frame uses 29 bit extended frame format
    #[inline]
    pub fn is_extended(&self) -> bool {
        self._id & EFF_FLAG != 0
    }

    /// Check if frame is an error message
    #[inline]
    pub fn is_error(&self) -> bool {
        self._id & ERR_FLAG != 0
    }

    /// Check if frame is a remote transmission request
    #[inline]
    pub fn is_rtr(&self) -> bool {
        self._id & RTR_FLAG != 0
    }

    /// A slice into the actual data. Slice will always be <= 8 bytes in length
    #[inline]
    pub fn data(&self) -> &[u8] {
        &self._data[..(self._data_len as usize)]
    }

    /// Read error from message and transform it into a `CanError`.
    ///
    /// SocketCAN errors are indicated using the error bit and coded inside
    /// id and data payload. Call `error()` converts these into usable
    /// `CanError` instances.
    ///
    /// If the frame is malformed, this may fail with a
    /// `CanErrorDecodingFailure`.
    #[inline]
    pub fn error(&self) -> Result<CanError, CanErrorDecodingFailure> {
        CanError::from_frame(self)
    }
}
impl fmt::UpperHex for CanFrame {
    /// Format as `ID#DATA`, with data bytes in upper-case hex. The
    /// alternate flag (`{:#X}`) separates data bytes with spaces.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        write!(f, "{:X}#", self.id())?;

        let sep = if f.alternate() { " " } else { "" };
        let mut hex_bytes = self.data().iter().map(|v| format!("{:02X}", v));
        write!(f, "{}", hex_bytes.join(sep))
    }
}
/// CanFilter
///
/// Contains an internal id and mask. Packets are considered to be matched by
/// a filter if `received_id & mask == filter_id & mask` holds true.
// `#[repr(C)]` so a `&[CanFilter]` can be passed to setsockopt directly.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct CanFilter {
    // raw CAN ID compared against incoming frames (after masking)
    _id: u32,
    // bitmask selecting which ID bits take part in the comparison
    _mask: u32,
}
impl CanFilter {
    /// Construct a new CAN filter.
    ///
    /// The filter accepts frames for which `received_id & mask == id & mask`.
    pub fn new(id: u32, mask: u32) -> Result<CanFilter, ConstructionError> {
        Ok(CanFilter { _id: id, _mask: mask })
    }
}
|
//! Crate to `#include` C++ headers in your Rust code, and generate
//! idiomatic bindings using `cxx`. See [include_cpp] for details.
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// The crazy macro_rules magic in this file is thanks to dtolnay@
// and is a way of attaching rustdoc to each of the possible directives
// within the include_cpp outer macro. None of the directives actually
// do anything - all the magic is handled entirely by
// autocxx_macro::include_cpp_impl.
#[allow(unused_imports)] // doc cross-reference only
use autocxx_engine::IncludeCppEngine;
#[cfg_attr(doc, aquamarine::aquamarine)]
/// Include some C++ headers in your Rust project.
///
/// This macro allows you to include one or more C++ headers within
/// your Rust code, and call their functions fairly naturally.
///
/// # Examples
///
/// C++ header (`input.h`):
/// ```cpp
/// #include <cstdint>
///
/// uint32_t do_math(uint32_t a);
/// ```
///
/// Rust code:
/// ```
/// # use autocxx_macro::include_cpp_impl as include_cpp;
/// include_cpp!(
/// # parse_only!()
/// #include "input.h"
/// generate!("do_math")
/// safety!(unsafe)
/// );
///
/// # mod ffi { pub fn do_math(a: u32) -> u32 { a+3 } }
/// # fn main() {
/// ffi::do_math(3);
/// # }
/// ```
///
/// The resulting bindings will use idiomatic Rust wrappers for types from the [cxx]
/// crate, for example [cxx::UniquePtr] or [cxx::CxxString]. Due to the care and thought
/// that's gone into the [cxx] crate, such bindings are pleasant and idiomatic to use
/// from Rust, and usually don't require the `unsafe` keyword.
///
/// # User manual - introduction
///
/// [include_cpp] tries to make it possible to include C++ headers and use declared functions
/// and types as-is. The resulting bindings use wrappers for C++ STL types from the [cxx]
/// crate such as [cxx::UniquePtr] or [cxx::CxxString].
///
/// Why, then, do you need a manual? Three reasons:
///
/// * This manual will describe how to include `autocxx` in your build process.
/// * `autocxx` chooses to generate Rust bindings for C++ APIs in particular ways,
/// over which you have _some_ control. The manual discusses what and how.
/// * The combination of `autocxx` and [cxx] are not perfect. There are some STL
/// types and some fundamental C++ features which are not yet supported. Where that occurs,
/// you may need to create some manual bindings or otherwise workaround deficiencies.
/// This manual tells you how to spot such circumstances and work around them.
///
/// # Overview
///
/// Here's how to approach autocxx:
///
/// ```mermaid
/// flowchart TB
/// %%{init:{'flowchart':{'nodeSpacing': 60, 'rankSpacing': 30}}}%%
/// autocxx[Add a dependency on autocxx in your project]
/// which-build([Do you use cargo?])
/// autocxx--->which-build
/// autocxx-build[Add a dev dependency on autocxx-build]
/// build-rs[In your build.rs, tell autocxx-build about your header include path]
/// autocxx-build--->build-rs
/// which-build-- Yes -->autocxx-build
/// macro[Add include_cpp! macro: list headers and allowlist]
/// build-rs--->macro
/// autocxx-gen[Use autocxx-gen command line tool]
/// which-build-- No -->autocxx-gen
/// autocxx-gen--->macro
/// build[Build]
/// macro--->build
/// check[Confirm generation using cargo expand]
/// build--->check
/// manual[Add manual cxx::bridge for anything missing]
/// check--->manual
/// use[Use generated ffi mod APIs]
/// manual--->use
/// ```
///
/// # Configuring the build - if you're using cargo
///
/// You'll use the `autocxx-build` crate. Simply copy from the
/// [demo example](https://github.com/google/autocxx/blob/main/demo/build.rs).
/// You'll need to provide it:
/// * The list of `.rs` files which will have `include_cpp!` macros present
/// * Your C++ header include path.
///
/// # Configuring the build - if you're not using cargo
///
/// See the `autocxx-gen` crate. You'll need to:
///
/// * Run the `codegen` phase. You'll need to use the [autocxx-gen]
/// tool to process the .rs code into C++ header and
/// implementation files. This will also generate `.rs` side bindings.
/// * Educate the procedural macro about where to find the generated `.rs` bindings. Set the
/// `AUTOCXX_RS` environment variable to a list of directories to search.
/// If you use `autocxx-build`, this happens automatically. (You can alternatively
/// specify `AUTOCXX_RS_FILE` to give a precise filename as opposed to a directory to search,
/// though this isn't recommended unless your build system specifically requires it
/// because it allows only a single `include_cpp!` block per `.rs` file.)
///
/// ```mermaid
/// flowchart TB
/// s(Rust source with include_cpp!)
/// c(Existing C++ headers)
/// cg(autocxx-gen or autocxx-build)
/// genrs(Generated .rs file)
/// gencpp(Generated .cpp and .h files)
/// rsb(Rust/Cargo build)
/// cppb(C++ build)
/// l(Linker)
/// s --> cg
/// c --> cg
/// cg --> genrs
/// cg --> gencpp
/// m(autocxx-macro)
/// s --> m
/// genrs-. included .->m
/// m --> rsb
/// gencpp --> cppb
/// cppb --> l
/// rsb --> l
/// ```
///
/// # The `include_cpp` macro
///
/// Within the braces of the `include_cpp!{...}` macro, you should provide
/// a list of at least the following:
///
/// * `#include "cpp_header.h"`: a header filename to parse and include
/// * `generate!("type_or_function_name")`: a type or function name whose declaration
/// should be made available to C++.
/// * Optionally, `safety!(unsafe)` - see discussion of [`safety`].
///
/// Other directives are possible as documented in this crate.
///
/// Now, try to build your Rust project. `autocxx` may fail to generate bindings
/// for some of the items you specified with [generate] directives: remove
/// those directives for now, then see the next section for advice.
///
/// # Did it work? How do I deal with failure?
///
/// Once you've achieved a successful build, you might wonder how to know what
/// bindings have been generated. `cargo expand` will show you. In the (near) future,
/// it's hoped that `rust-analyzer` will gain support for expanding procedural
/// macros and you'll be able to see the bindings from Rust IDEs.
///
/// Either way, you'll find (for sure!) that `autocxx` hasn't been able to generate
/// bindings for all your C++ APIs. This may manifest as a hard failure or a soft
/// failure:
/// * If you specified such an item in a [`generate`] directive (or similar such
/// as [`generate_pod`]) then your build will fail.
/// * If such APIs are methods belonging to a type, `autocxx` will generate other
/// methods for the type but ignore those.
///
/// In this latter case, you should see helpful messages _in the generated bindings_
/// as rust documentation explaining what went wrong.
///
/// If this happens (and it will!) your options are:
/// * Add more, simpler C++ APIs which fulfil the same need but are compatible with
/// `autocxx`.
/// * Write manual bindings. This is most useful if a type is supported by [cxx]
/// but not `autocxx` (for example, at the time of writing `std::array`). See
/// the later section on 'combining automatic and manual bindings'.
///
/// # The generated bindings
///
/// ## Pointers, references, and so-forth
///
/// `autocxx` knows how to deal with C++ APIs which take C++ types:
/// * By value
/// * By reference (const or not)
/// * By raw pointer
/// * By `std::unique_ptr`
/// * By `std::shared_ptr`
/// * By `std::weak_ptr`
///
/// (all of this is because the underlying [cxx] crate has such versatility).
/// Some of these have some quirks in the way they're exposed in Rust, described below.
///
/// ### Passing between C++ and Rust by value
///
/// Rust is free to move data around at any time. That's _not OK_ for some C++ types
/// which have non-trivial move constructors or destructors. Such types are common
/// in C++ (for example, even C++ `std::string`s) and these types commonly appear
/// in API declarations which we want to make available in Rust. Worse still, Rust
/// has no visibility into whether a C++ type meets these criteria. What do we do?
///
/// You have a choice:
/// * As standard, any C++ type passed by value will be `std::move`d on the C++ side
/// into a `std::unique_ptr` before being passed to Rust, and similarly moved out
/// of a `std::unique_ptr` when passed from Rust to C++.
/// * If you know that your C++ type can be safely byte-copied, then you can
/// override this behavior by using [`generate_pod`] instead of [`generate`].
///
/// There's not a significant ergonomic problem from the use of [`cxx::UniquePtr`].
/// The main negative of the automatic boxing into [`cxx::UniquePtr`] is performance:
/// specifically, the need to
/// allocate heap cells on the C++ side and move data into and out of them.
/// You don't want to be doing this inside a tight loop (but if you're calling
/// across the C++/Rust boundary in a tight loop, perhaps reconsider that boundary
/// anyway).
///
/// If you want your type to be transferred between Rust and C++ truly _by value_
/// then use [`generate_pod`] instead of [`generate`].
///
/// Specifically, to be compatible with [`generate_pod`], your C++ type must either:
/// * Lack a move constructor _and_ lack a destructor
/// * Or contain a human promise that it's relocatable, by implementing
/// the C++ trait `IsRelocatable` per the instructions in
/// [cxx.h](https://github.com/dtolnay/cxx/blob/master/include/cxx.h)
///
/// Otherwise, your build will fail.
///
/// This doesn't just make a difference to the generated code for the type;
/// it also makes a difference to any functions which take or return that type.
/// If there's a C++ function which takes a struct by value, but that struct
/// is not declared as POD-safe, then we'll generate wrapper functions to move
/// that type into and out of [`cxx::UniquePtr`]s.
///
/// ### References and pointers
///
/// We follow [cxx] norms here. Specifically:
/// * A C++ reference becomes a Rust reference
/// * A C++ pointer becomes a Rust pointer.
/// * If a reference is returned with an ambiguous lifetime, we don't generate
/// code for the function
/// * Pointers require use of `unsafe`, references don't necessarily.
///
/// That last point is key. If your C++ API takes pointers, you're going
/// to have to use `unsafe`. Similarly, if your C++ API returns a pointer,
/// you'll have to use `unsafe` to do anything useful with the pointer in Rust.
/// This is intentional: a pointer from C++ might be subject to concurrent
/// mutation, or it might have a lifetime that could disappear at any moment.
/// As a human, you must promise that you understand the constraints around
/// use of that pointer and that's what the `unsafe` keyword is for.
///
/// Exactly the same issues apply to C++ references _in theory_, but in practice,
/// they usually don't. Therefore [cxx] has taken the view that we can "trust"
/// a C++ reference to a higher degree than a pointer, and autocxx follows that
/// lead. In practice, of course, references are rarely return values from C++
/// APIs so we rarely have to navel-gaze about the trustworthiness of a
/// reference.
///
/// (See also the discussion of [`safety`] - if you haven't specified
/// an unsafety policy, _all_ C++ APIs require `unsafe` so the discussion is moot.)
///
/// ### [`cxx::UniquePtr`]s
///
/// We use [`cxx::UniquePtr`] in completely the normal way, but there are a few
/// quirks which you're more likely to run into with `autocxx`.
///
/// * Calling methods: you may need to use [`cxx::UniquePtr::pin_mut`] to get
/// a reference on which you can call a method.
/// * Getting a raw pointer in order to pass to some pre-existing function:
/// at present you need to do:
/// ```rust,nocompile
/// let mut a = ffi::A::make_unique();
/// unsafe { ffi::TakePointerToA(std::pin::Pin::<&mut ffi::A>::into_inner_unchecked(a.pin_mut())) };
/// ```
/// This may be simplified in future.
///
/// ## Construction
///
/// Types gain a `make_unique` associated function. At present they only
/// gain this if they have an explicit C++ constructor; this is a limitation
/// which should be resolved in future.
/// This will (of course) return a [`cxx::UniquePtr`] containing that type.
///
/// ## Built-in types
///
/// The generated code uses `cxx` for interop: see that crate for many important
/// considerations including safety and the list of built-in types, for example
/// [`cxx::UniquePtr`] and [`cxx::CxxString`].
///
/// There are almost no `autocxx`-specific types. At present, we do have
/// [`c_int`] and similar, to wrap the integer types whose length
/// varies in C++. It's hoped to contribute full support here to [cxx]
/// in a future change.
///
/// ## Strings
///
/// `autocxx` uses [cxx::CxxString]. However, as noted above, we can't
/// just pass a C++ string by value, so we'll box and unbox it automatically
/// such that you're really dealing with `UniquePtr<CxxString>` on the Rust
/// side, even if the API just took or returned a plain old `std::string`.
///
/// However, to ease ergonomics, functions that accept a `std::string` will
/// actually accept anything that
/// implements a trait called `ffi::ToCppString`. That may either be a
/// `UniquePtr<CxxString>` or just a plain old Rust string - which will be
/// converted transparently to a C++ string.
///
/// This trait, and its implementations, are not present in the `autocxx`
/// documentation because they're dynamically generated in _your_ code
/// so that they can call through to a `make_string` implementation in
/// the C++ that we're injecting into your C++ build system.
///
/// (None of that happens if you use [exclude_utilities], so don't do that.)
///
/// If you need to create a blank `UniquePtr<CxxString>` in Rust, such that
/// (for example) you can pass its mutable reference or pointer into some
/// pre-existing C++ API, there's currently no built in support for that.
/// You should add an extra C++ API:
///
/// ```cpp
/// std::string make_blank_string() { return std::string(); }
/// ```
///
/// and then use [`generate`] to make bindings for that.
///
/// ## Preprocessor symbols
///
/// `#define` and other preprocessor symbols will appear as constants.
/// At present there is no way to do compile-time disablement of code
/// (equivalent of `#ifdef`).
///
/// ## Integer types
///
/// For C++ types with a defined size, just go ahead and use `u64`, `i32` etc.
/// For types such as `int` or `unsigned long`, the hope is that you can
/// eventually use `std::os::raw::c_int` or `std::os::raw::c_ulong` etc.
/// For now, this doesn't quite work: instead you need to wrap these values
/// in a newtype wrapper such as [c_int] or [c_ulong] in this crate.
///
/// ## String constants
///
/// Whether from a preprocessor symbol or from a C++ `char*` constant,
/// strings appear as `[u8]` with a null terminator. To get a Rust string,
/// do this:
///
/// ```cpp
/// #define BOB "Hello"
/// ```
///
/// ```
/// # mod ffi { pub static BOB: [u8; 6] = [72u8, 101u8, 108u8, 108u8, 111u8, 0u8]; }
/// assert_eq!(std::str::from_utf8(&ffi::BOB).unwrap().trim_end_matches(char::from(0)), "Hello");
/// ```
///
/// ## Namespaces
///
/// The C++ namespace structure is reflected in mods within the generated
/// ffi mod. However, at present there is an internal limitation that
/// autocxx can't handle multiple symbols with the same identifier, even
/// if they're in different namespaces. This will be fixed in future.
///
/// ## Overloads - and identifiers ending in digits
///
/// C++ allows function overloads; Rust doesn't. `autocxx` follows the lead
/// of `bindgen` here and generates overloads as `func`, `func1`, `func2` etc.
/// This is essentially awful without `rust-analyzer` IDE support, which isn't
/// quite there yet.
///
/// `autocxx` doesn't yet support default parameters.
///
/// ## Forward declarations
///
/// A type which is incomplete in the C++ headers (i.e. represented only by a forward
/// declaration) can't be held in a `UniquePtr` within Rust (because Rust can't know
/// if it has a destructor that will need to be called if the object is `Drop`ped.)
/// Naturally, such an object can't be passed by value either; it can still be
/// referenced in Rust references.
///
/// ## Generic types
///
/// If you're using one of the generic types which is supported natively by cxx,
/// e.g. `std::unique_ptr`, it should work as you expect. For other generic types,
/// we synthesize a concrete Rust type, corresponding to a C++ typedef, for each
/// concrete instantiation of the type. Such generated types are always opaque,
/// and never have methods attached. That's therefore enough to pass them
/// between return types and parameters of other functions within [`cxx::UniquePtr`]s
/// but not really enough to do anything else with these types just yet. Hopefully,
/// this will be improved in future. At present such types have a name
/// `AutocxxConcrete{n}` but this may change in future.
///
/// ## Exceptions
///
/// Exceptions are not supported. If your C++ code is compiled with exceptions,
/// you can expect serious runtime explosions. The underlying [cxx] crate has
/// exception support, so it would be possible to add them.
///
/// # Mixing manual and automated bindings
///
/// `autocxx` uses [cxx] underneath, and its build process will happily spot and
/// process any manually-crafted [`cxx::bridge`] mods which you include in your
/// Rust source code. A common pattern could be to use `autocxx` to generate
/// all the bindings possible, then hand-craft a [`cxx::bridge`] mod for the
/// remainder where `autocxx` falls short.
///
/// To do this, you'll need to use the [ability of one cxx::bridge mod to refer to types from another](https://cxx.rs/extern-c++.html#reusing-existing-binding-types),
/// for example:
///
/// ```rust,ignore
/// autocxx::include_cpp! {
/// #include "foo.h"
/// safety!(unsafe_ffi)
/// generate!("take_A")
/// generate!("A")
/// }
/// #[cxx::bridge]
/// mod ffi2 {
/// unsafe extern "C++" {
/// include!("foo.h");
/// type A = crate::ffi::A;
/// fn give_A() -> UniquePtr<A>; // in practice, autocxx could happily do this
/// }
/// }
/// fn main() {
/// let a = ffi2::give_A();
/// assert_eq!(ffi::take_A(&a), autocxx::c_int(5));
/// }
/// ```
///
/// # Safety
///
/// # Examples
///
/// * [Demo](https://github.com/google/autocxx/tree/main/demo) - simplest possible demo
/// * [S2 example](https://github.com/google/autocxx/tree/main/examples/s2) - example using S2 geometry library
/// * [Integration tests](https://github.com/google/autocxx/blob/main/engine/src/integration_tests.rs)
/// - hundreds of small snippets
///
/// Contributions of more examples to the `examples` directory are much appreciated!
///
/// # Internals
///
/// For documentation on how this all actually _works_, see
/// [IncludeCppEngine].
#[macro_export]
macro_rules! include_cpp {
    (
        // Matches `#include "foo.h"` directives: a `#`, the ident `include`,
        // then the quoted header path.
        $(#$include:ident $lit:literal)*
        // Matches every other directive, e.g. `generate!("A")`, `safety!(unsafe)`.
        $($mac:ident!($($arg:tt)*))*
    ) => {
        // Expand each directive macro with the `__docs` marker. These expand
        // to nothing; they exist only so rustdoc is attached per directive
        // and unknown directive names fail early with a usage error.
        $($crate::$include!{__docs})*
        $($crate::$mac!{__docs})*
        // The real work: forward the whole directive list to the procedural
        // macro, which parses the headers and generates the bindings.
        $crate::include_cpp_impl! {
            $(#include $lit)*
            $($mac!($($arg)*))*
        }
    };
}
/// Include a C++ header. A directive to be included inside
/// [include_cpp] - see [include_cpp] for details
// NOTE: this and every directive macro below simply forward to `usage!`,
// which expands to nothing for the `__docs` marker and to a compile error
// otherwise. The actual parsing of directives happens in `include_cpp_impl!`.
#[macro_export]
macro_rules! include {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
/// Generate Rust bindings for the given C++ type or function.
/// A directive to be included inside
/// [include_cpp] - see [include_cpp] for general information.
/// See also [generate_pod].
#[macro_export]
macro_rules! generate {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
/// Generate as "plain old data" and add to allowlist.
/// Generate Rust bindings for the given C++ type such that
/// it can be passed and owned by value in Rust. This only works
/// for C++ types which have trivial move constructors and no
/// destructor - you'll encounter a compile error otherwise.
/// If your type doesn't match that description, use [generate]
/// instead, and own the type using [UniquePtr][autocxx_engine::cxx::UniquePtr].
/// A directive to be included inside
/// [include_cpp] - see [include_cpp] for general information.
#[macro_export]
macro_rules! generate_pod {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
/// Generate Rust bindings for all C++ types and functions
/// found. Highly experimental and not recommended.
/// A directive to be included inside
/// [include_cpp] - see [include_cpp] for general information.
/// See also [generate].
#[macro_export]
macro_rules! generate_all {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
/// Generate as "plain old data". For use with [generate_all]
/// and similarly experimental.
#[macro_export]
macro_rules! pod {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
/// Skip the normal generation of a `make_string` function
/// and other utilities which we might generate normally.
/// A directive to be included inside
/// [include_cpp] - see [include_cpp] for general information.
#[macro_export]
macro_rules! exclude_utilities {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
/// Entirely block some type from appearing in the generated
/// code. This can be useful if there is a type which is not
/// understood by bindgen or autocxx, and incorrect code is
/// otherwise generated.
/// This is 'greedy' in the sense that any functions/methods
/// which take or return such a type will _also_ be blocked.
///
/// A directive to be included inside
/// [include_cpp] - see [include_cpp] for general information.
#[macro_export]
macro_rules! block {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
/// The name of the mod to be generated with the FFI code.
/// The default is `ffi`.
///
/// A directive to be included inside
/// [include_cpp] - see [include_cpp] for general information.
#[macro_export]
macro_rules! name {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
/// Specifies a global safety policy for functions generated
/// from these headers. By default (without such a `safety!`
/// directive) all such functions are marked as `unsafe` and
/// therefore can only be called within an `unsafe {}` block
/// or some `unsafe` function which you create.
///
/// Alternatively, by specifying a `safety!` block you can
/// declare that most generated functions are in fact safe.
/// Specifically, you'd specify:
/// `safety!(unsafe)`
/// or
/// `safety!(unsafe_ffi)`
/// These two options are functionally identical. If you're
/// unsure, simply use `unsafe`. The reason for the
/// latter option is if you have code review policies which
/// might want to give a different level of scrutiny to
/// C++ interop as opposed to other types of unsafe Rust code.
/// Maybe in your organization, C++ interop is less scary than
/// a low-level Rust data structure using pointer manipulation.
/// Or maybe it's more scary. Either way, using `unsafe` for
/// the data structure and using `unsafe_ffi` for the C++
/// interop allows you to apply different linting tools and
/// policies to the different options.
///
/// Irrespective, C++ code is of course unsafe. It's worth
/// noting that use of C++ can cause unexpected unsafety at
/// a distance in faraway Rust code. As with any use of the
/// `unsafe` keyword in Rust, *you the human* are declaring
/// that you've analyzed all possible ways that the code
/// can be used and you are guaranteeing to the compiler that
/// no badness can occur. Good luck.
///
/// Generated C++ APIs which use raw pointers remain `unsafe`
/// no matter what policy you choose.
// Directive stub: forwards to `usage!`; the policy itself is interpreted
// by `include_cpp_impl!`.
#[macro_export]
macro_rules! safety {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
/// Whether to avoid generating [`cxx::UniquePtr`] and [`cxx::Vector`]
/// implementations. This is primarily useful for reducing test cases and
/// shouldn't be used in normal operation.
///
/// A directive to be included inside
/// [include_cpp] - see [include_cpp] for general information.
#[macro_export]
macro_rules! exclude_impls {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
#[doc(hidden)]
#[macro_export]
macro_rules! usage {
    // `__docs` is the marker passed by `include_cpp!` purely so rustdoc gets
    // attached to each directive macro; it expands to nothing.
    (__docs) => {};
    // Any other invocation means a directive was used outside `include_cpp!`:
    // emit a helpful compile error showing the intended usage.
    ($($tt:tt)*) => {
        compile_error! {r#"usage: include_cpp! {
#include "path/to/header.h"
generate!(...)
generate_pod!(...)
}
"#}
    };
}
#[doc(hidden)]
// Re-export of the procedural macro which performs the actual binding
// generation; hidden because users should invoke it via `include_cpp!`.
pub use autocxx_macro::include_cpp_impl;
// Defines a `#[repr(transparent)]` newtype `$r` wrapping `::std::os::raw::$r`,
// documented with `$d`, and registers it with cxx as the C++ type named `$c`.
macro_rules! ctype_wrapper {
    ($r:ident, $c:expr, $d:expr) => {
        #[doc=$d]
        #[derive(Debug, Eq, Clone, PartialEq, Hash)]
        #[allow(non_camel_case_types)]
        #[repr(transparent)]
        pub struct $r(pub ::std::os::raw::$r);
        // Declares to cxx that this Rust type corresponds to the C++ type
        // `$c` and is trivial, i.e. may be passed and held by value.
        unsafe impl autocxx_engine::cxx::ExternType for $r {
            type Id = autocxx_engine::cxx::type_id!($c);
            type Kind = autocxx_engine::cxx::kind::Trivial;
        }
    };
}
// Wrappers for the C integer types whose width varies by platform. Each
// invocation expands (via `ctype_wrapper!`) to a `#[repr(transparent)]`
// newtype around the matching `::std::os::raw` alias plus a
// `cxx::ExternType` impl so the value can cross the FFI boundary by value.
// Fixed doc-string grammar: "for an short" -> "for a short".
ctype_wrapper!(
    c_ulonglong,
    "c_ulonglong",
    "Newtype wrapper for an unsigned long long"
);
ctype_wrapper!(c_longlong, "c_longlong", "Newtype wrapper for a long long");
ctype_wrapper!(c_ulong, "c_ulong", "Newtype wrapper for an unsigned long");
ctype_wrapper!(c_long, "c_long", "Newtype wrapper for a long");
ctype_wrapper!(
    c_ushort,
    "c_ushort",
    "Newtype wrapper for an unsigned short"
);
ctype_wrapper!(c_short, "c_short", "Newtype wrapper for a short");
ctype_wrapper!(c_uint, "c_uint", "Newtype wrapper for an unsigned int");
ctype_wrapper!(c_int, "c_int", "Newtype wrapper for an int");
ctype_wrapper!(c_uchar, "c_uchar", "Newtype wrapper for an unsigned char");
/// Newtype wrapper for a C void. Only useful as a `*c_void`
// Unlike the integer wrappers above, this carries no `Debug`/`Eq` etc.
// derives: `std::os::raw::c_void` does not implement them, and the type is
// only ever meaningful behind a pointer.
#[allow(non_camel_case_types)]
#[repr(transparent)]
pub struct c_void(pub ::std::os::raw::c_void);
// Declares to cxx that this type corresponds to C++ `void` and is trivial,
// so raw pointers to it can cross the FFI boundary.
unsafe impl autocxx_engine::cxx::ExternType for c_void {
    type Id = autocxx_engine::cxx::type_id!(c_void);
    type Kind = autocxx_engine::cxx::kind::Trivial;
}
/// autocxx couldn't generate these bindings.
/// If you come across a method, type or function which refers to this type,
/// it indicates that autocxx couldn't generate that binding. A documentation
/// comment should be attached indicating the reason.
pub struct BindingGenerationFailure {
    // Zero-length array of raw pointers: gives the type zero size while
    // keeping it impossible to construct usefully outside this crate.
    _unallocatable: [*const u8; 0],
    // PhantomPinned makes the type !Unpin, preventing it being moved.
    _pinned: core::marker::PhantomData<core::marker::PhantomPinned>,
}
// Fix doctest ignore syntax.
//! Crate to `#include` C++ headers in your Rust code, and generate
//! idiomatic bindings using `cxx`. See [include_cpp] for details.
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// The crazy macro_rules magic in this file is thanks to dtolnay@
// and is a way of attaching rustdoc to each of the possible directives
// within the include_cpp outer macro. None of the directives actually
// do anything - all the magic is handled entirely by
// autocxx_macro::include_cpp_impl.
#[allow(unused_imports)] // doc cross-reference only
use autocxx_engine::IncludeCppEngine;
#[cfg_attr(doc, aquamarine::aquamarine)]
/// Include some C++ headers in your Rust project.
///
/// This macro allows you to include one or more C++ headers within
/// your Rust code, and call their functions fairly naturally.
///
/// # Examples
///
/// C++ header (`input.h`):
/// ```cpp
/// #include <cstdint>
///
/// uint32_t do_math(uint32_t a);
/// ```
///
/// Rust code:
/// ```
/// # use autocxx_macro::include_cpp_impl as include_cpp;
/// include_cpp!(
/// # parse_only!()
/// #include "input.h"
/// generate!("do_math")
/// safety!(unsafe)
/// );
///
/// # mod ffi { pub fn do_math(a: u32) -> u32 { a+3 } }
/// # fn main() {
/// ffi::do_math(3);
/// # }
/// ```
///
/// The resulting bindings will use idiomatic Rust wrappers for types from the [cxx]
/// crate, for example [cxx::UniquePtr] or [cxx::CxxString]. Due to the care and thought
/// that's gone into the [cxx] crate, such bindings are pleasant and idiomatic to use
/// from Rust, and usually don't require the `unsafe` keyword.
///
/// # User manual - introduction
///
/// [include_cpp] tries to make it possible to include C++ headers and use declared functions
/// and types as-is. The resulting bindings use wrappers for C++ STL types from the [cxx]
/// crate such as [cxx::UniquePtr] or [cxx::CxxString].
///
/// Why, then, do you need a manual? Three reasons:
///
/// * This manual will describe how to include `autocxx` in your build process.
/// * `autocxx` chooses to generate Rust bindings for C++ APIs in particular ways,
/// over which you have _some_ control. The manual discusses what and how.
/// * The combination of `autocxx` and [cxx] are not perfect. There are some STL
/// types and some fundamental C++ features which are not yet supported. Where that occurs,
/// you may need to create some manual bindings or otherwise workaround deficiencies.
/// This manual tells you how to spot such circumstances and work around them.
///
/// # Overview
///
/// Here's how to approach autocxx:
///
/// ```mermaid
/// flowchart TB
/// %%{init:{'flowchart':{'nodeSpacing': 60, 'rankSpacing': 30}}}%%
/// autocxx[Add a dependency on autocxx in your project]
/// which-build([Do you use cargo?])
/// autocxx--->which-build
/// autocxx-build[Add a dev dependency on autocxx-build]
/// build-rs[In your build.rs, tell autocxx-build about your header include path]
/// autocxx-build--->build-rs
/// which-build-- Yes -->autocxx-build
/// macro[Add include_cpp! macro: list headers and allowlist]
/// build-rs--->macro
/// autocxx-gen[Use autocxx-gen command line tool]
/// which-build-- No -->autocxx-gen
/// autocxx-gen--->macro
/// build[Build]
/// macro--->build
/// check[Confirm generation using cargo expand]
/// build--->check
/// manual[Add manual cxx::bridge for anything missing]
/// check--->manual
/// use[Use generated ffi mod APIs]
/// manual--->use
/// ```
///
/// # Configuring the build - if you're using cargo
///
/// You'll use the `autocxx-build` crate. Simply copy from the
/// [demo example](https://github.com/google/autocxx/blob/main/demo/build.rs).
/// You'll need to provide it:
/// * The list of `.rs` files which will have `include_cpp!` macros present
/// * Your C++ header include path.
///
/// # Configuring the build - if you're not using cargo
///
/// See the `autocxx-gen` crate. You'll need to:
///
/// * Run the `codegen` phase. You'll need to use the [autocxx-gen]
/// tool to process the .rs code into C++ header and
/// implementation files. This will also generate `.rs` side bindings.
/// * Educate the procedural macro about where to find the generated `.rs` bindings. Set the
/// `AUTOCXX_RS` environment variable to a list of directories to search.
/// If you use `autocxx-build`, this happens automatically. (You can alternatively
/// specify `AUTOCXX_RS_FILE` to give a precise filename as opposed to a directory to search,
/// though this isn't recommended unless your build system specifically requires it
/// because it allows only a single `include_cpp!` block per `.rs` file.)
///
/// ```mermaid
/// flowchart TB
/// s(Rust source with include_cpp!)
/// c(Existing C++ headers)
/// cg(autocxx-gen or autocxx-build)
/// genrs(Generated .rs file)
/// gencpp(Generated .cpp and .h files)
/// rsb(Rust/Cargo build)
/// cppb(C++ build)
/// l(Linker)
/// s --> cg
/// c --> cg
/// cg --> genrs
/// cg --> gencpp
/// m(autocxx-macro)
/// s --> m
/// genrs-. included .->m
/// m --> rsb
/// gencpp --> cppb
/// cppb --> l
/// rsb --> l
/// ```
///
/// # The `include_cpp` macro
///
/// Within the braces of the `include_cpp!{...}` macro, you should provide
/// a list of at least the following:
///
/// * `#include "cpp_header.h"`: a header filename to parse and include
/// * `generate!("type_or_function_name")`: a type or function name whose declaration
///   should be made available to Rust.
/// * Optionally, `safety!(unsafe)` - see discussion of [`safety`].
///
/// Other directives are possible as documented in this crate.
///
/// Now, try to build your Rust project. `autocxx` may fail to generate bindings
/// for some of the items you specified with [generate] directives: remove
/// those directives for now, then see the next section for advice.
///
/// # Did it work? How do I deal with failure?
///
/// Once you've achieved a successful build, you might wonder how to know what
/// bindings have been generated. `cargo expand` will show you. In the (near) future,
/// it's hoped that `rust-analyzer` will gain support for expanding procedural
/// macros and you'll be able to see the bindings from Rust IDEs.
///
/// Either way, you'll find (for sure!) that `autocxx` hasn't been able to generate
/// bindings for all your C++ APIs. This may manifest as a hard failure or a soft
/// failure:
/// * If you specified such an item in a [`generate`] directive (or similar such
/// as [`generate_pod`]) then your build will fail.
/// * If such APIs are methods belonging to a type, `autocxx` will generate other
/// methods for the type but ignore those.
///
/// In this latter case, you should see helpful messages _in the generated bindings_
/// as rust documentation explaining what went wrong.
///
/// If this happens (and it will!) your options are:
/// * Add more, simpler C++ APIs which fulfil the same need but are compatible with
/// `autocxx`.
/// * Write manual bindings. This is most useful if a type is supported by [cxx]
/// but not `autocxx` (for example, at the time of writing `std::array`). See
///   the later section on 'combining automatic and manual bindings'.
///
/// # The generated bindings
///
/// ## Pointers, references, and so-forth
///
/// `autocxx` knows how to deal with C++ APIs which take C++ types:
/// * By value
/// * By reference (const or not)
/// * By raw pointer
/// * By `std::unique_ptr`
/// * By `std::shared_ptr`
/// * By `std::weak_ptr`
///
/// (all of this is because the underlying [cxx] crate has such versatility).
/// Some of these have some quirks in the way they're exposed in Rust, described below.
///
/// ### Passing between C++ and Rust by value
///
/// Rust is free to move data around at any time. That's _not OK_ for some C++ types
/// which have non-trivial move constructors or destructors. Such types are common
/// in C++ (for example, even C++ `std::string`s) and these types commonly appear
/// in API declarations which we want to make available in Rust. Worse still, Rust
/// has no visibility into whether a C++ type meets these criteria. What do we do?
///
/// You have a choice:
/// * As standard, any C++ type passed by value will be `std::move`d on the C++ side
/// into a `std::unique_ptr` before being passed to Rust, and similarly moved out
/// of a `std::unique_ptr` when passed from Rust to C++.
/// * If you know that your C++ type can be safely byte-copied, then you can
/// override this behavior by using [`generate_pod`] instead of [`generate`].
///
/// There's not a significant ergonomic problem from the use of [`cxx::UniquePtr`].
/// The main negative of the automatic boxing into [`cxx::UniquePtr`] is performance:
/// specifically, the need to
/// allocate heap cells on the C++ side and move data into and out of them.
/// You don't want to be doing this inside a tight loop (but if you're calling
/// across the C++/Rust boundary in a tight loop, perhaps reconsider that boundary
/// anyway).
///
/// If you want your type to be transferred between Rust and C++ truly _by value_
/// then use [`generate_pod`] instead of [`generate`].
///
/// Specifically, to be compatible with [`generate_pod`], your C++ type must either:
/// * Lack a move constructor _and_ lack a destructor
/// * Or contain a human promise that it's relocatable, by implementing
/// the C++ trait `IsRelocatable` per the instructions in
/// [cxx.h](https://github.com/dtolnay/cxx/blob/master/include/cxx.h)
///
/// Otherwise, your build will fail.
///
/// This doesn't just make a difference to the generated code for the type;
/// it also makes a difference to any functions which take or return that type.
/// If there's a C++ function which takes a struct by value, but that struct
/// is not declared as POD-safe, then we'll generate wrapper functions to move
/// that type into and out of [`cxx::UniquePtr`]s.
///
/// ### References and pointers
///
/// We follow [cxx] norms here. Specifically:
/// * A C++ reference becomes a Rust reference
/// * A C++ pointer becomes a Rust pointer.
/// * If a reference is returned with an ambiguous lifetime, we don't generate
/// code for the function
/// * Pointers require use of `unsafe`, references don't necessarily.
///
/// That last point is key. If your C++ API takes pointers, you're going
/// to have to use `unsafe`. Similarly, if your C++ API returns a pointer,
/// you'll have to use `unsafe` to do anything useful with the pointer in Rust.
/// This is intentional: a pointer from C++ might be subject to concurrent
/// mutation, or it might have a lifetime that could disappear at any moment.
/// As a human, you must promise that you understand the constraints around
/// use of that pointer and that's what the `unsafe` keyword is for.
///
/// Exactly the same issues apply to C++ references _in theory_, but in practice,
/// they usually don't. Therefore [cxx] has taken the view that we can "trust"
/// a C++ reference to a higher degree than a pointer, and autocxx follows that
/// lead. In practice, of course, references are rarely return values from C++
/// APIs so we rarely have to navel-gaze about the trustworthiness of a
/// reference.
///
/// (See also the discussion of [`safety`] - if you haven't specified
/// an unsafety policy, _all_ C++ APIs require `unsafe` so the discussion is moot.)
///
/// ### [`cxx::UniquePtr`]s
///
/// We use [`cxx::UniquePtr`] in completely the normal way, but there are a few
/// quirks which you're more likely to run into with `autocxx`.
///
/// * Calling methods: you may need to use [`cxx::UniquePtr::pin_mut`] to get
/// a reference on which you can call a method.
/// * Getting a raw pointer in order to pass to some pre-existing function:
/// at present you need to do:
/// ```rust,ignore
/// let mut a = ffi::A::make_unique();
/// unsafe { ffi::TakePointerToA(std::pin::Pin::<&mut ffi::A>::into_inner_unchecked(a.pin_mut())) };
/// ```
/// This may be simplified in future.
///
/// ## Construction
///
/// Types gain a `make_unique` associated function. At present they only
/// gain this if they have an explicit C++ constructor; this is a limitation
/// which should be resolved in future.
/// This will (of course) return a [`cxx::UniquePtr`] containing that type.
///
/// ## Built-in types
///
/// The generated code uses `cxx` for interop: see that crate for many important
/// considerations including safety and the list of built-in types, for example
/// [`cxx::UniquePtr`] and [`cxx::CxxString`].
///
/// There are almost no `autocxx`-specific types. At present, we do have
/// [`c_int`] and similar, to wrap the integer types whose length
/// varies in C++. It's hoped to contribute full support here to [cxx]
/// in a future change.
///
/// ## Strings
///
/// `autocxx` uses [cxx::CxxString]. However, as noted above, we can't
/// just pass a C++ string by value, so we'll box and unbox it automatically
/// such that you're really dealing with `UniquePtr<CxxString>` on the Rust
/// side, even if the API just took or returned a plain old `std::string`.
///
/// However, to ease ergonomics, functions that accept a `std::string` will
/// actually accept anything that
/// implements a trait called `ffi::ToCppString`. That may either be a
/// `UniquePtr<CxxString>` or just a plain old Rust string - which will be
/// converted transparently to a C++ string.
///
/// This trait, and its implementations, are not present in the `autocxx`
/// documentation because they're dynamically generated in _your_ code
/// so that they can call through to a `make_string` implementation in
/// the C++ that we're injecting into your C++ build system.
///
/// (None of that happens if you use [exclude_utilities], so don't do that.)
///
/// If you need to create a blank `UniquePtr<CxxString>` in Rust, such that
/// (for example) you can pass its mutable reference or pointer into some
/// pre-existing C++ API, there's currently no built in support for that.
/// You should add an extra C++ API:
///
/// ```cpp
/// std::string make_blank_string() { return std::string(); }
/// ```
///
/// and then use [`generate`] to make bindings for that.
///
/// ## Preprocessor symbols
///
/// `#define` and other preprocessor symbols will appear as constants.
/// At present there is no way to do compile-time disablement of code
/// (equivalent of `#ifdef`).
///
/// ## Integer types
///
/// For C++ types with a defined size, just go ahead and use `u64`, `i32` etc.
/// For types such as `int` or `unsigned long`, the hope is that you can
/// eventually use `std::os::raw::c_int` or `std::os::raw::c_ulong` etc.
/// For now, this doesn't quite work: instead you need to wrap these values
/// in a newtype wrapper such as [c_int] or [c_ulong] in this crate.
///
/// ## String constants
///
/// Whether from a preprocessor symbol or from a C++ `char*` constant,
/// strings appear as `[u8]` with a null terminator. To get a Rust string,
/// do this:
///
/// ```cpp
/// #define BOB "Hello"
/// ```
///
/// ```
/// # mod ffi { pub static BOB: [u8; 6] = [72u8, 101u8, 108u8, 108u8, 111u8, 0u8]; }
/// assert_eq!(std::str::from_utf8(&ffi::BOB).unwrap().trim_end_matches(char::from(0)), "Hello");
/// ```
///
/// ## Namespaces
///
/// The C++ namespace structure is reflected in mods within the generated
/// ffi mod. However, at present there is an internal limitation that
/// autocxx can't handle multiple symbols with the same identifier, even
/// if they're in different namespaces. This will be fixed in future.
///
/// ## Overloads - and identifiers ending in digits
///
/// C++ allows function overloads; Rust doesn't. `autocxx` follows the lead
/// of `bindgen` here and generates overloads as `func`, `func1`, `func2` etc.
/// This is essentially awful without `rust-analyzer` IDE support, which isn't
/// quite there yet.
///
/// `autocxx` doesn't yet support default parameters.
///
/// ## Forward declarations
///
/// A type which is incomplete in the C++ headers (i.e. represented only by a forward
/// declaration) can't be held in a `UniquePtr` within Rust (because Rust can't know
/// if it has a destructor that will need to be called if the object is `Drop`ped.)
/// Naturally, such an object can't be passed by value either; it can still be
/// referenced in Rust references.
///
/// ## Generic types
///
/// If you're using one of the generic types which is supported natively by cxx,
/// e.g. `std::unique_ptr`, it should work as you expect. For other generic types,
/// we synthesize a concrete Rust type, corresponding to a C++ typedef, for each
/// concrete instantiation of the type. Such generated types are always opaque,
/// and never have methods attached. That's therefore enough to pass them
/// between return types and parameters of other functions within [`cxx::UniquePtr`]s
/// but not really enough to do anything else with these types just yet. Hopefully,
/// this will be improved in future. At present such types have a name
/// `AutocxxConcrete{n}` but this may change in future.
///
/// ## Exceptions
///
/// Exceptions are not supported. If your C++ code is compiled with exceptions,
/// you can expect serious runtime explosions. The underlying [cxx] crate has
/// exception support, so it would be possible to add them.
///
/// # Mixing manual and automated bindings
///
/// `autocxx` uses [cxx] underneath, and its build process will happily spot and
/// process any manually-crafted [`cxx::bridge`] mods which you include in your
/// Rust source code. A common pattern could be to use `autocxx` to generate
/// all the bindings possible, then hand-craft a [`cxx::bridge`] mod for the
/// remainder where `autocxx` falls short.
///
/// To do this, you'll need to use the [ability of one cxx::bridge mod to refer to types from another](https://cxx.rs/extern-c++.html#reusing-existing-binding-types),
/// for example:
///
/// ```rust,ignore
/// autocxx::include_cpp! {
/// #include "foo.h"
/// safety!(unsafe_ffi)
/// generate!("take_A")
/// generate!("A")
/// }
/// #[cxx::bridge]
/// mod ffi2 {
/// unsafe extern "C++" {
/// include!("foo.h");
/// type A = crate::ffi::A;
/// fn give_A() -> UniquePtr<A>; // in practice, autocxx could happily do this
/// }
/// }
/// fn main() {
/// let a = ffi2::give_A();
/// assert_eq!(ffi::take_A(&a), autocxx::c_int(5));
/// }
/// ```
///
/// # Safety
///
/// # Examples
///
/// * [Demo](https://github.com/google/autocxx/tree/main/demo) - simplest possible demo
/// * [S2 example](https://github.com/google/autocxx/tree/main/examples/s2) - example using S2 geometry library
/// * [Integration tests](https://github.com/google/autocxx/blob/main/engine/src/integration_tests.rs)
/// - hundreds of small snippets
///
/// Contributions of more examples to the `examples` directory are much appreciated!
///
/// # Internals
///
/// For documentation on how this all actually _works_, see
/// [IncludeCppEngine].
#[macro_export]
macro_rules! include_cpp {
    (
        // `#include "foo.h"` directives: a `#`, the ident `include`, then
        // the quoted header path.
        $(#$include:ident $lit:literal)*
        // All remaining directives, e.g. `generate!("A")` or `safety!(unsafe)`.
        $($mac:ident!($($arg:tt)*))*
    ) => {
        // Expand each directive with the `__docs` marker: these expansions
        // are empty and exist only so each directive name carries rustdoc
        // and unknown directives fail with a usage error.
        $($crate::$include!{__docs})*
        $($crate::$mac!{__docs})*
        // Delegate the full directive list to the procedural macro which
        // parses the headers and emits the bindings.
        $crate::include_cpp_impl! {
            $(#include $lit)*
            $($mac!($($arg)*))*
        }
    };
}
/// Include a C++ header. A directive to be included inside
/// [include_cpp] - see [include_cpp] for details
// Each directive macro in this group forwards to `usage!`: it expands to
// nothing when invoked with the `__docs` marker from `include_cpp!`, and to
// a compile error otherwise. `include_cpp_impl!` interprets the directives.
#[macro_export]
macro_rules! include {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
/// Generate Rust bindings for the given C++ type or function.
/// A directive to be included inside
/// [include_cpp] - see [include_cpp] for general information.
/// See also [generate_pod].
#[macro_export]
macro_rules! generate {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
/// Generate as "plain old data" and add to allowlist.
/// Generate Rust bindings for the given C++ type such that
/// it can be passed and owned by value in Rust. This only works
/// for C++ types which have trivial move constructors and no
/// destructor - you'll encounter a compile error otherwise.
/// If your type doesn't match that description, use [generate]
/// instead, and own the type using [UniquePtr][autocxx_engine::cxx::UniquePtr].
/// A directive to be included inside
/// [include_cpp] - see [include_cpp] for general information.
#[macro_export]
macro_rules! generate_pod {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
/// Generate Rust bindings for all C++ types and functions
/// found. Highly experimental and not recommended.
/// A directive to be included inside
/// [include_cpp] - see [include_cpp] for general information.
/// See also [generate].
#[macro_export]
macro_rules! generate_all {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
/// Generate as "plain old data". For use with [generate_all]
/// and similarly experimental.
#[macro_export]
macro_rules! pod {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
/// Skip the normal generation of a `make_string` function
/// and other utilities which we might generate normally.
/// A directive to be included inside
/// [include_cpp] - see [include_cpp] for general information.
#[macro_export]
macro_rules! exclude_utilities {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
/// Entirely block some type from appearing in the generated
/// code. This can be useful if there is a type which is not
/// understood by bindgen or autocxx, and incorrect code is
/// otherwise generated.
/// This is 'greedy' in the sense that any functions/methods
/// which take or return such a type will _also_ be blocked.
///
/// A directive to be included inside
/// [include_cpp] - see [include_cpp] for general information.
#[macro_export]
macro_rules! block {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
/// The name of the mod to be generated with the FFI code.
/// The default is `ffi`.
///
/// A directive to be included inside
/// [include_cpp] - see [include_cpp] for general information.
#[macro_export]
macro_rules! name {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
/// Specifies a global safety policy for functions generated
/// from these headers. By default (without such a `safety!`
/// directive) all such functions are marked as `unsafe` and
/// therefore can only be called within an `unsafe {}` block
/// or some `unsafe` function which you create.
///
/// Alternatively, by specifying a `safety!` block you can
/// declare that most generated functions are in fact safe.
/// Specifically, you'd specify:
/// `safety!(unsafe)`
/// or
/// `safety!(unsafe_ffi)`
/// These two options are functionally identical. If you're
/// unsure, simply use `unsafe`. The reason for the
/// latter option is if you have code review policies which
/// might want to give a different level of scrutiny to
/// C++ interop as opposed to other types of unsafe Rust code.
/// Maybe in your organization, C++ interop is less scary than
/// a low-level Rust data structure using pointer manipulation.
/// Or maybe it's more scary. Either way, using `unsafe` for
/// the data structure and using `unsafe_ffi` for the C++
/// interop allows you to apply different linting tools and
/// policies to the different options.
///
/// Irrespective, C++ code is of course unsafe. It's worth
/// noting that use of C++ can cause unexpected unsafety at
/// a distance in faraway Rust code. As with any use of the
/// `unsafe` keyword in Rust, *you the human* are declaring
/// that you've analyzed all possible ways that the code
/// can be used and you are guaranteeing to the compiler that
/// no badness can occur. Good luck.
///
/// Generated C++ APIs which use raw pointers remain `unsafe`
/// no matter what policy you choose.
// Directive stub like the others: the policy is acted on by
// `include_cpp_impl!`, not here.
#[macro_export]
macro_rules! safety {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
/// Whether to avoid generating [`cxx::UniquePtr`] and [`cxx::Vector`]
/// implementations. This is primarily useful for reducing test cases and
/// shouldn't be used in normal operation.
///
/// A directive to be included inside
/// [include_cpp] - see [include_cpp] for general information.
#[macro_export]
macro_rules! exclude_impls {
    ($($tt:tt)*) => { $crate::usage!{$($tt)*} };
}
#[doc(hidden)]
#[macro_export]
// Fallback expansion shared by all the directive macros above: the internal
// `__docs` marker expands to nothing, and any other token stream produces a
// compile-time message showing the intended `include_cpp!` usage.
macro_rules! usage {
    (__docs) => {};
    ($($tt:tt)*) => {
        compile_error! {r#"usage: include_cpp! {
            #include "path/to/header.h"
            generate!(...)
            generate_pod!(...)
        }
"#}
    };
}
#[doc(hidden)]
pub use autocxx_macro::include_cpp_impl;
/// Defines a `#[repr(transparent)]` newtype `$r` wrapping `::std::os::raw::$r`
/// and registers it with cxx as the C++ type named `$c`, treated as trivially
/// relocatable (`kind::Trivial`). `$d` becomes the wrapper's doc comment.
macro_rules! ctype_wrapper {
    ($r:ident, $c:expr, $d:expr) => {
        #[doc=$d]
        #[derive(Debug, Eq, Clone, PartialEq, Hash)]
        #[allow(non_camel_case_types)]
        #[repr(transparent)]
        pub struct $r(pub ::std::os::raw::$r);

        unsafe impl autocxx_engine::cxx::ExternType for $r {
            type Id = autocxx_engine::cxx::type_id!($c);
            type Kind = autocxx_engine::cxx::kind::Trivial;
        }
    };
}
// Wrappers for each C integer type, expanded by `ctype_wrapper!` above.
ctype_wrapper!(
    c_ulonglong,
    "c_ulonglong",
    "Newtype wrapper for an unsigned long long"
);
ctype_wrapper!(c_longlong, "c_longlong", "Newtype wrapper for a long long");
ctype_wrapper!(c_ulong, "c_ulong", "Newtype wrapper for an unsigned long");
ctype_wrapper!(c_long, "c_long", "Newtype wrapper for a long");
ctype_wrapper!(
    c_ushort,
    "c_ushort",
    "Newtype wrapper for an unsigned short"
);
// Fixed generated-doc typo: "an short" -> "a short".
ctype_wrapper!(c_short, "c_short", "Newtype wrapper for a short");
ctype_wrapper!(c_uint, "c_uint", "Newtype wrapper for an unsigned int");
ctype_wrapper!(c_int, "c_int", "Newtype wrapper for an int");
ctype_wrapper!(c_uchar, "c_uchar", "Newtype wrapper for an unsigned char");
/// Newtype wrapper for a C void. Only useful as a `*c_void`
#[allow(non_camel_case_types)]
#[repr(transparent)]
pub struct c_void(pub ::std::os::raw::c_void);

// Registered with cxx as `c_void` and marked Trivial; per the doc above the
// type is only meant to be used behind a raw pointer.
unsafe impl autocxx_engine::cxx::ExternType for c_void {
    type Id = autocxx_engine::cxx::type_id!(c_void);
    type Kind = autocxx_engine::cxx::kind::Trivial;
}
/// autocxx couldn't generate these bindings.
/// If you come across a method, type or function which refers to this type,
/// it indicates that autocxx couldn't generate that binding. A documentation
/// comment should be attached indicating the reason.
pub struct BindingGenerationFailure {
    // Zero-sized private field: the private field prevents construction
    // outside this module; the raw-pointer element type presumably also
    // opts out of auto Send/Sync — TODO(review): confirm that intent.
    _unallocatable: [*const u8; 0],
    // PhantomPinned opts the type out of `Unpin`.
    _pinned: core::marker::PhantomData<core::marker::PhantomPinned>,
}
|
use std::convert::Into;
use std::str::FromStr;
// Powers of 1024, named for readability in the conversions below.
const BYTE: i64 = 1;
const KILOBYTE: i64 = BYTE << 10;
const MEGABYTE: i64 = KILOBYTE << 10;
const GIGABYTE: i64 = MEGABYTE << 10;
const TERABYTE: i64 = GIGABYTE << 10;
const PETABYTE: i64 = TERABYTE << 10;
const EXABYTE: i64 = PETABYTE << 10;

/// Conversion of a count into binary byte units (1K = 1024 bytes).
///
/// `n.kilobytes()` reads as "n kilobytes, expressed in bytes": each helper
/// multiplies the underlying byte count by a further factor of 1024.
pub trait Capacity {
    /// The raw number of bytes.
    fn bytes(&self) -> i64;

    fn kilobytes(&self) -> i64 {
        // `<< 10` (i.e. * 1024), matching the constants above. The previous
        // `rotate_left(10)` wraps the ten high bits back into the low end,
        // silently corrupting large values instead of shifting them.
        self.bytes() << 10
    }

    fn megabytes(&self) -> i64 {
        self.kilobytes() << 10
    }

    fn gigabytes(&self) -> i64 {
        self.megabytes() << 10
    }

    fn terabytes(&self) -> i64 {
        self.gigabytes() << 10
    }

    fn petabytes(&self) -> i64 {
        self.terabytes() << 10
    }

    fn exabytes(&self) -> i64 {
        self.petabytes() << 10
    }

    /// Render the byte count in human-readable form, e.g. `"512"`, `"1K"`,
    /// `"1.5M"`.
    fn capacity(&self) -> String {
        match self.bytes() {
            // Below 1K: print the plain number. (The previous
            // `stringify!(small)` printed the literal string "small".)
            small if small < KILOBYTE => small.to_string(),
            large => {
                // Largest unit first, so the first match wins.
                // A plain array: no heap allocation needed here.
                let units = [
                    ('E', EXABYTE),
                    ('P', PETABYTE),
                    ('T', TERABYTE),
                    ('G', GIGABYTE),
                    ('M', MEGABYTE),
                    ('K', KILOBYTE),
                ];

                for &(suffix, size) in units.iter() {
                    if large == size {
                        return format!("1{}", suffix);
                    } else if large > size {
                        let sized = (large as f64) / (size as f64);
                        let round = (sized * 100.0).round() / 100.0;
                        return format!("{:.1}{}", round, suffix);
                    }
                }

                // `large >= KILOBYTE` here, so the 'K' arm always returns.
                unreachable!()
            }
        }
    }
}

impl Capacity for i64 {
    fn bytes(&self) -> i64 {
        *self
    }
}
// `Bytes` participates in the unit conversions by exposing its stored size.
impl Capacity for Bytes {
    fn bytes(&self) -> i64 {
        (*self).size
    }
}
/// A byte count carried as a named type rather than a bare `i64`.
pub struct Bytes {
    size: i64,
}

// Implement `From` rather than `Into`: the standard blanket impl then
// provides `i64: Into<Bytes>` for free, so existing `.into()` callers
// keep working (clippy: from_over_into).
impl From<i64> for Bytes {
    fn from(size: i64) -> Bytes {
        Bytes { size }
    }
}

impl FromStr for Bytes {
    type Err = String;

    /// Parsing is not implemented yet: every input is rejected, echoing the
    /// offending string back as the error. TODO(review): implement real
    /// parsing (e.g. "1.5K") or document why it is intentionally rejected.
    fn from_str(s: &str) -> Result<Bytes, String> {
        Err(s.to_owned())
    }
}
// Unit tests for the Capacity trait. The "capactity" typo in the original
// test names is fixed to "capacity" (test names are harness-discovered,
// nothing references them by name).
#[test]
fn test_bytes() {
    assert_eq!(1.bytes(), 1);
}

#[test]
fn test_kilobytes() {
    assert_eq!(1.kilobytes(), 1024)
}

#[test]
fn test_megabytes() {
    assert_eq!(1.megabytes(), 1048576)
}

#[test]
fn test_gigabytes() {
    assert_eq!(1.gigabytes(), 1073741824)
}

#[test]
fn test_terabytes() {
    assert_eq!(1.terabytes(), 1099511627776)
}

#[test]
fn test_petabytes() {
    assert_eq!(1.petabytes(), 1125899906842624)
}

#[test]
fn test_exabytes() {
    assert_eq!(1.exabytes(), 1152921504606846976)
}

#[test]
fn test_kilobytes_capacity() {
    let half = 1.kilobytes() / 2;
    assert_eq!((1.kilobytes() + half).capacity(), "1.5K".to_owned())
}

#[test]
fn test_megabytes_capacity() {
    let half = 1.megabytes() / 2;
    assert_eq!((1.megabytes() + half).capacity(), "1.5M".to_owned())
}

#[test]
fn test_gigabytes_capacity() {
    let half = 1.gigabytes() / 2;
    assert_eq!((1.gigabytes() + half).capacity(), "1.5G".to_owned())
}

#[test]
fn test_terabytes_capacity() {
    let half = 1.terabytes() / 2;
    assert_eq!((1.terabytes() + half).capacity(), "1.5T".to_owned())
}

#[test]
fn test_petabytes_capacity() {
    let half = 1.petabytes() / 2;
    assert_eq!((1.petabytes() + half).capacity(), "1.5P".to_owned())
}

#[test]
fn test_exabytes_capacity() {
    let half = 1.exabytes() / 2;
    assert_eq!((1.exabytes() + half).capacity(), "1.5E".to_owned())
}
NOTE: a revised draft of the capacity module above follows below.
use std::convert::Into;
use std::str::FromStr;
// Powers of 1024, named for readability in the conversions below.
const BYTE: i64 = 1;
const KILOBYTE: i64 = BYTE << 10;
const MEGABYTE: i64 = KILOBYTE << 10;
const GIGABYTE: i64 = MEGABYTE << 10;
const TERABYTE: i64 = GIGABYTE << 10;
const PETABYTE: i64 = TERABYTE << 10;
const EXABYTE: i64 = PETABYTE << 10;

/// Conversion of a count into binary byte units (1K = 1024 bytes).
///
/// `n.kilobytes()` reads as "n kilobytes, expressed in bytes": each helper
/// multiplies the underlying byte count by a further factor of 1024.
pub trait Capacity {
    /// The raw number of bytes.
    fn bytes(&self) -> i64;

    fn kilobytes(&self) -> i64 {
        // `<< 10` (i.e. * 1024), matching the constants above. The previous
        // `rotate_left(10)` wraps the ten high bits back into the low end,
        // silently corrupting large values instead of shifting them.
        self.bytes() << 10
    }

    fn megabytes(&self) -> i64 {
        self.kilobytes() << 10
    }

    fn gigabytes(&self) -> i64 {
        self.megabytes() << 10
    }

    fn terabytes(&self) -> i64 {
        self.gigabytes() << 10
    }

    fn petabytes(&self) -> i64 {
        self.terabytes() << 10
    }

    fn exabytes(&self) -> i64 {
        self.petabytes() << 10
    }

    /// Render the byte count in human-readable form, e.g. `"512"`, `"1K"`,
    /// `"1.5M"`.
    fn capacity(&self) -> String {
        match self.bytes() {
            // Below 1K: print the plain number. (The previous
            // `stringify!(small)` printed the literal string "small".)
            small if small < KILOBYTE => small.to_string(),
            large => {
                // Largest unit first, so the first match wins.
                // A plain array: no heap allocation needed here.
                let units = [
                    ('E', EXABYTE),
                    ('P', PETABYTE),
                    ('T', TERABYTE),
                    ('G', GIGABYTE),
                    ('M', MEGABYTE),
                    ('K', KILOBYTE),
                ];

                for &(suffix, size) in units.iter() {
                    if large == size {
                        return format!("1{}", suffix);
                    } else if large > size {
                        let sized = (large as f64) / (size as f64);
                        let round = (sized * 100.0).round() / 100.0;
                        return format!("{:.1}{}", round, suffix);
                    }
                }

                // `large >= KILOBYTE` here, so the 'K' arm always returns.
                unreachable!()
            }
        }
    }
}

impl Capacity for i64 {
    fn bytes(&self) -> i64 {
        *self
    }
}
// `Bytes` participates in the unit conversions by exposing its stored size.
impl Capacity for Bytes {
    fn bytes(&self) -> i64 {
        (*self).size
    }
}
/// A byte count carried as a named type rather than a bare `i64`.
pub struct Bytes {
    size: i64,
}

// Implement `From` rather than `Into`: the standard blanket impl then
// provides `i64: Into<Bytes>` for free, so existing `.into()` callers
// keep working (clippy: from_over_into).
impl From<i64> for Bytes {
    fn from(size: i64) -> Bytes {
        Bytes { size }
    }
}

impl FromStr for Bytes {
    type Err = String;

    /// Partial parser: only the empty string and "0" are accepted (both as
    /// zero bytes); everything else is rejected with the offending string as
    /// the error. TODO(review): extend to real numeric/unit parsing.
    fn from_str(s: &str) -> Result<Bytes, String> {
        match s {
            "" | "0" => Ok(Bytes { size: 0 }),
            _ => Err(s.to_owned()),
        }
    }
}
#[test]
fn test_bytes() {
assert_eq!(1.bytes(), 1);
}
#[test]
fn test_kilobytes() {
assert_eq!(1.kilobytes(), 1024)
}
#[test]
fn test_megabytes() {
assert_eq!(1.megabytes(), 1048576)
}
#[test]
fn test_gigabytes() {
assert_eq!(1.gigabytes(), 1073741824)
}
#[test]
fn test_terabytes() {
assert_eq!(1.terabytes(), 1099511627776)
}
#[test]
fn test_petabytes() {
assert_eq!(1.petabytes(), 1125899906842624)
}
#[test]
fn test_exabytes() {
assert_eq!(1.exabytes(), 1152921504606846976)
}
#[test]
fn test_kilobytes_capactity() {
let half = 1.kilobytes() / 2;
assert_eq!((1.kilobytes() + half).capacity(), "1.5K".to_owned())
}
#[test]
fn test_megabytes_capactity() {
let half = 1.megabytes() / 2;
assert_eq!((1.megabytes() + half).capacity(), "1.5M".to_owned())
}
#[test]
fn test_gigabytes_capactity() {
let half = 1.gigabytes() / 2;
assert_eq!((1.gigabytes() + half).capacity(), "1.5G".to_owned())
}
#[test]
fn test_terabytes_capactity() {
let half = 1.terabytes() / 2;
assert_eq!((1.terabytes() + half).capacity(), "1.5T".to_owned())
}
#[test]
fn test_petabytes_capactity() {
let half = 1.petabytes() / 2;
assert_eq!((1.petabytes() + half).capacity(), "1.5P".to_owned())
}
#[test]
fn test_exabytes_capactity() {
let half = 1.exabytes() / 2;
assert_eq!((1.exabytes() + half).capacity(), "1.5E".to_owned())
}
|
//! SocketCAN support.
//!
//! The Linux kernel supports using CAN-devices through a network-like API
//! (see https://www.kernel.org/doc/Documentation/networking/can.txt). This
//! crate allows easy access to this functionality without having to wrestle
//! libc calls.
//!
//! # An introduction to CAN
//!
//! The CAN bus was originally designed to allow microcontrollers inside a
//! vehicle to communicate over a single shared bus. Messages called
//! *frames* are multicast to all devices on the bus.
//!
//! Every frame consists of an ID and a payload of up to 8 bytes. If two
//! devices attempt to send a frame at the same time, the device with the
//! higher ID will notice the conflict, stop sending and reattempt to send its
//! frame in the next time slot. This means that the lower the ID, the higher
//! the priority. Since most devices have a limited buffer for outgoing frames,
//! a single device with a high priority (== low ID) can block communication
//! on that bus by sending messages too fast.
//!
//! The Linux socketcan subsystem makes the CAN bus available as a regular
//! networking device. Opening a network interface allows receiving all CAN
//! messages received on it. A device CAN be opened multiple times, every
//! client will receive all CAN frames simultaneously.
//!
//! Similarly, CAN frames can be sent to the bus by multiple clients
//! simultaneously as well.
//!
//! # Hardware and more information
//!
//! More information on CAN [can be found on Wikipedia](https://en.wikipedia.org/wiki/CAN_bus). When not running on
//! an embedded platform with already integrated CAN components,
//! [Thomas Fischl's USBtin](http://www.fischl.de/usbtin/) (see
//! [section 2.4](http://www.fischl.de/usbtin/#socketcan)) is one of many ways
//! to get started.
//!
//! # RawFd
//!
//! Raw access to the underlying file descriptor and construction through
//! is available through the `AsRawFd`, `IntoRawFd` and `FromRawFd`
//! implementations.
// clippy: do not warn about things like "SocketCAN" inside the docs
#![cfg_attr(feature = "cargo-clippy", allow(doc_markdown))]
extern crate byte_conv;
extern crate hex;
extern crate itertools;
extern crate libc;
extern crate netlink_rs;
extern crate nix;
extern crate try_from;
mod err;
pub use err::{CanError, CanErrorDecodingFailure};
pub mod dump;
mod nl;
mod util;
#[cfg(test)]
mod tests;
use libc::{c_int, c_short, c_void, c_uint, c_ulong, socket, SOCK_RAW, close, bind, sockaddr, read,
write, SOL_SOCKET, SO_RCVTIMEO, timespec, timeval, EINPROGRESS, SO_SNDTIMEO, time_t,
suseconds_t, fcntl, F_SETFL, O_NONBLOCK};
use itertools::Itertools;
use nix::net::if_::if_nametoindex;
pub use nl::CanInterface;
use std::{error, fmt, io, time};
use std::mem::{size_of, uninitialized};
use std::os::unix::io::{AsRawFd, FromRawFd, IntoRawFd, RawFd};
use util::{set_socket_option, set_socket_option_mult};
/// Check an error return value for timeouts.
///
/// Due to the fact that timeouts are reported as errors, calling `read_frame`
/// on a socket with a timeout that does not receive a frame in time will
/// result in an error being returned. This trait adds a `should_retry` method
/// to `Error` and `Result` to check for this condition.
pub trait ShouldRetry {
    /// Check for timeout
    ///
    /// If `true`, the error is probably due to a timeout.
    fn should_retry(&self) -> bool;
}

impl ShouldRetry for io::Error {
    fn should_retry(&self) -> bool {
        // Timeouts surface as EAGAIN, EWOULDBLOCK or EINPROGRESS. The stdlib
        // already maps the first two to `WouldBlock`; EINPROGRESS arrives as
        // `Other` and must be recognised by its raw OS error code.
        match self.kind() {
            io::ErrorKind::WouldBlock => true,
            io::ErrorKind::Other => self.raw_os_error() == Some(EINPROGRESS),
            _ => false,
        }
    }
}

impl<E: fmt::Debug> ShouldRetry for io::Result<E> {
    fn should_retry(&self) -> bool {
        // Delegate to the contained error, if there is one.
        match *self {
            Err(ref e) => e.should_retry(),
            Ok(_) => false,
        }
    }
}
// constants stolen from C headers
const AF_CAN: c_int = 29;
const PF_CAN: c_int = 29;
const CAN_RAW: c_int = 1;
const SOL_CAN_BASE: c_int = 100;
const SOL_CAN_RAW: c_int = SOL_CAN_BASE + CAN_RAW;
const CAN_RAW_FILTER: c_int = 1;
const CAN_RAW_ERR_FILTER: c_int = 2;
const CAN_RAW_LOOPBACK: c_int = 3;
const CAN_RAW_RECV_OWN_MSGS: c_int = 4;
// unused:
// const CAN_RAW_FD_FRAMES: c_int = 5;
const CAN_RAW_JOIN_FILTERS: c_int = 6;
// get timestamp in a struct timeval (us accuracy)
// const SIOCGSTAMP: c_int = 0x8906;
// get timestamp in a struct timespec (ns accuracy)
const SIOCGSTAMPNS: c_int = 0x8907;
/// if set, indicate 29 bit extended format
pub const EFF_FLAG: u32 = 0x80000000;
/// remote transmission request flag
pub const RTR_FLAG: u32 = 0x40000000;
/// error flag
pub const ERR_FLAG: u32 = 0x20000000;
/// valid bits in standard frame id
pub const SFF_MASK: u32 = 0x000007ff;
/// valid bits in extended frame id
pub const EFF_MASK: u32 = 0x1fffffff;
/// valid bits in error frame
pub const ERR_MASK: u32 = 0x1fffffff;
/// an error mask that will cause SocketCAN to report all errors
pub const ERR_MASK_ALL: u32 = ERR_MASK;
/// an error mask that will cause SocketCAN to silently drop all errors
pub const ERR_MASK_NONE: u32 = 0;
/// Convert a `std::time::Duration` into a C `timeval`.
/// Sub-microsecond precision is truncated by the `/ 1000` division.
fn c_timeval_new(t: time::Duration) -> timeval {
    timeval {
        tv_sec: t.as_secs() as time_t,
        tv_usec: (t.subsec_nanos() / 1000) as suseconds_t,
    }
}
/// CAN socket address passed to `bind(2)` — assumed to mirror the kernel's
/// `struct sockaddr_can` layout; TODO(review): confirm against linux/can.h.
#[derive(Debug)]
#[repr(C)]
struct CanAddr {
    _af_can: c_short, // address family; set to AF_CAN in `open_if`
    if_index: c_int,  // interface index
    rx_id: u32,
    tx_id: u32,
}
#[derive(Debug)]
/// Errors opening socket
pub enum CanSocketOpenError {
    /// Device could not be found
    LookupError(nix::Error),

    /// System error while trying to look up device name
    IOError(io::Error),
}

impl fmt::Display for CanSocketOpenError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            CanSocketOpenError::LookupError(ref e) => write!(f, "CAN Device not found: {}", e),
            CanSocketOpenError::IOError(ref e) => write!(f, "IO: {}", e),
        }
    }
}

// NOTE(review): `description`/`cause` are the old (pre-1.42) Error API;
// kept as-is for compatibility with the toolchain this crate targets.
impl error::Error for CanSocketOpenError {
    fn description(&self) -> &str {
        match *self {
            CanSocketOpenError::LookupError(_) => "can device not found",
            CanSocketOpenError::IOError(ref e) => e.description(),
        }
    }

    fn cause(&self) -> Option<&error::Error> {
        match *self {
            CanSocketOpenError::LookupError(ref e) => Some(e),
            CanSocketOpenError::IOError(ref e) => Some(e),
        }
    }
}
#[derive(Debug, Copy, Clone)]
/// Error that occurs when creating CAN packets
pub enum ConstructionError {
    /// CAN ID was outside the range of valid IDs
    IDTooLarge,
    /// More than 8 Bytes of payload data were passed in
    TooMuchData,
}

impl fmt::Display for ConstructionError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Pick the message first, then emit it in a single write.
        let msg = match *self {
            ConstructionError::IDTooLarge => "CAN ID too large",
            ConstructionError::TooMuchData => "Payload is larger than CAN maximum of 8 bytes",
        };
        write!(f, "{}", msg)
    }
}

impl error::Error for ConstructionError {
    // Legacy short description (pre-1.42 Error API).
    fn description(&self) -> &str {
        match *self {
            ConstructionError::IDTooLarge => "can id too large",
            ConstructionError::TooMuchData => "too much data",
        }
    }
}
// Conversions so `?` can lift lookup (nix) and system (io) failures into
// `CanSocketOpenError` — see `CanSocket::open`/`open_if`.
impl From<nix::Error> for CanSocketOpenError {
    fn from(e: nix::Error) -> CanSocketOpenError {
        CanSocketOpenError::LookupError(e)
    }
}

impl From<io::Error> for CanSocketOpenError {
    fn from(e: io::Error) -> CanSocketOpenError {
        CanSocketOpenError::IOError(e)
    }
}
/// A socket for a CAN device.
///
/// Will be closed upon deallocation. To close manually, use `std::ops::Drop`
/// (i.e. `std::mem::drop`). Internally this is just a wrapped
/// file-descriptor.
#[derive(Debug)]
pub struct CanSocket {
    fd: c_int, // raw socket fd, owned by this struct
}
impl CanSocket {
    /// Open a named CAN device.
    ///
    /// Usually the more common case, opens a socket can device by name, such
    /// as "vcan0" or "socan0".
    pub fn open(ifname: &str) -> Result<CanSocket, CanSocketOpenError> {
        let if_index = if_nametoindex(ifname)?;
        CanSocket::open_if(if_index)
    }

    /// Open CAN device by interface number.
    ///
    /// Opens a CAN device by kernel interface number.
    pub fn open_if(if_index: c_uint) -> Result<CanSocket, CanSocketOpenError> {
        let addr = CanAddr {
            _af_can: AF_CAN as c_short,
            if_index: if_index as c_int,
            rx_id: 0, // unused for a CAN_RAW socket
            tx_id: 0, // unused for a CAN_RAW socket
        };

        // open socket
        let sock_fd;
        unsafe {
            sock_fd = socket(PF_CAN, SOCK_RAW, CAN_RAW);
        }

        if sock_fd == -1 {
            return Err(CanSocketOpenError::from(io::Error::last_os_error()));
        }

        // bind it
        let bind_rv;
        unsafe {
            let sockaddr_ptr = &addr as *const CanAddr;
            bind_rv = bind(sock_fd,
                           sockaddr_ptr as *const sockaddr,
                           size_of::<CanAddr>() as u32);
        }

        // close the socket on a failed bind so the fd does not leak
        if bind_rv == -1 {
            let e = io::Error::last_os_error();
            unsafe {
                close(sock_fd);
            }
            return Err(CanSocketOpenError::from(e));
        }

        Ok(CanSocket { fd: sock_fd })
    }

    /// Close the underlying file descriptor; called from `Drop`.
    fn close(&mut self) -> io::Result<()> {
        unsafe {
            let rv = close(self.fd);
            // close(2) returns -1 on failure. The previous check (`rv != -1`)
            // was inverted: it reported an error exactly when the close
            // succeeded and swallowed real failures.
            if rv == -1 {
                return Err(io::Error::last_os_error());
            }
        }
        Ok(())
    }

    /// Change socket to non-blocking mode
    pub fn set_nonblocking(&self) -> io::Result<()> {
        unsafe {
            // Read the current flags first so O_NONBLOCK is added to them
            // instead of clobbering whatever is already set on the fd.
            let flags = fcntl(self.fd, libc::F_GETFL);
            if flags == -1 {
                return Err(io::Error::last_os_error());
            }
            if fcntl(self.fd, F_SETFL, flags | O_NONBLOCK) == -1 {
                return Err(io::Error::last_os_error());
            }
        }
        Ok(())
    }

    /// Sets the read timeout on the socket
    ///
    /// For convenience, the result value can be checked using
    /// `ShouldRetry::should_retry` when a timeout is set.
    pub fn set_read_timeout(&self, duration: time::Duration) -> io::Result<()> {
        set_socket_option(self.fd, SOL_SOCKET, SO_RCVTIMEO, &c_timeval_new(duration))
    }

    /// Sets the write timeout on the socket
    pub fn set_write_timeout(&self, duration: time::Duration) -> io::Result<()> {
        set_socket_option(self.fd, SOL_SOCKET, SO_SNDTIMEO, &c_timeval_new(duration))
    }

    /// Blocking read a single can frame.
    pub fn read_frame(&self) -> io::Result<CanFrame> {
        let mut frame = CanFrame {
            _id: 0,
            _data_len: 0,
            _pad: 0,
            _res0: 0,
            _res1: 0,
            _data: [0; 8],
        };

        let read_rv = unsafe {
            let frame_ptr = &mut frame as *mut CanFrame;
            read(self.fd, frame_ptr as *mut c_void, size_of::<CanFrame>())
        };

        // anything other than a full frame (including -1) is an error
        if read_rv as usize != size_of::<CanFrame>() {
            return Err(io::Error::last_os_error());
        }

        Ok(frame)
    }

    /// Blocking read a single can frame with timestamp
    ///
    /// Note that reading a frame and retrieving the timestamp requires two
    /// consecutive syscalls. To avoid race conditions, exclusive access
    /// to the socket is enforced through requiring a `&mut self`.
    pub fn read_frame_with_timestamp(&mut self) -> io::Result<(CanFrame, time::SystemTime)> {
        let frame = self.read_frame()?;

        // Zero-initialize instead of `mem::uninitialized()` — the latter is
        // deprecated/UB-prone, and the ioctl overwrites the value on success
        // anyway.
        let mut ts = timespec { tv_sec: 0, tv_nsec: 0 };
        let rval = unsafe {
            libc::ioctl(self.fd, SIOCGSTAMPNS as c_ulong, &mut ts as *mut timespec)
        };

        if rval == -1 {
            return Err(io::Error::last_os_error());
        }

        Ok((frame, util::system_time_from_timespec(ts)))
    }

    /// Write a single can frame.
    ///
    /// Note that this function can fail with an `EAGAIN` error or similar.
    /// Use `write_frame_insist` if you need to be sure that the message got
    /// sent or failed.
    pub fn write_frame(&self, frame: &CanFrame) -> io::Result<()> {
        // a shared reference suffices here (compare std::net::UdpSocket)
        let write_rv = unsafe {
            let frame_ptr = frame as *const CanFrame;
            write(self.fd, frame_ptr as *const c_void, size_of::<CanFrame>())
        };

        if write_rv as usize != size_of::<CanFrame>() {
            return Err(io::Error::last_os_error());
        }

        Ok(())
    }

    /// Blocking write a single can frame, retrying until it gets sent
    /// successfully.
    pub fn write_frame_insist(&self, frame: &CanFrame) -> io::Result<()> {
        loop {
            match self.write_frame(frame) {
                Ok(v) => return Ok(v),
                Err(e) => {
                    // only retry on timeout-ish errors; propagate the rest
                    if !e.should_retry() {
                        return Err(e);
                    }
                }
            }
        }
    }

    /// Sets filters on the socket.
    ///
    /// CAN packages received by SocketCAN are matched against these filters,
    /// only matching packets are returned by the interface.
    ///
    /// See `CanFilter` for details on how filtering works. By default, a
    /// single filter matching all incoming frames is installed.
    pub fn set_filters(&self, filters: &[CanFilter]) -> io::Result<()> {
        set_socket_option_mult(self.fd, SOL_CAN_RAW, CAN_RAW_FILTER, filters)
    }

    /// Sets the error mask on the socket.
    ///
    /// By default (`ERR_MASK_NONE`) no error conditions are reported as
    /// special error frames by the socket. Enabling error conditions by
    /// setting `ERR_MASK_ALL` or another non-empty error mask causes the
    /// socket to receive notification about the specified conditions.
    #[inline]
    pub fn set_error_mask(&self, mask: u32) -> io::Result<()> {
        set_socket_option(self.fd, SOL_CAN_RAW, CAN_RAW_ERR_FILTER, &mask)
    }

    /// Enable or disable loopback.
    ///
    /// By default, loopback is enabled, causing other applications that open
    /// the same CAN bus to see frames emitted by different applications on
    /// the same system.
    #[inline]
    pub fn set_loopback(&self, enabled: bool) -> io::Result<()> {
        let loopback: c_int = if enabled { 1 } else { 0 };
        set_socket_option(self.fd, SOL_CAN_RAW, CAN_RAW_LOOPBACK, &loopback)
    }

    /// Enable or disable receiving of own frames.
    ///
    /// When loopback is enabled, this setting controls if CAN frames sent
    /// are received back immediately by the sender. Default is off.
    pub fn set_recv_own_msgs(&self, enabled: bool) -> io::Result<()> {
        let recv_own_msgs: c_int = if enabled { 1 } else { 0 };
        set_socket_option(self.fd, SOL_CAN_RAW, CAN_RAW_RECV_OWN_MSGS, &recv_own_msgs)
    }

    /// Enable or disable join filters.
    ///
    /// By default a frame is accepted if it matches any of the filters set
    /// with `set_filters`. If join filters is enabled, a frame has to match
    /// _all_ filters to be accepted.
    pub fn set_join_filters(&self, enabled: bool) -> io::Result<()> {
        let join_filters: c_int = if enabled { 1 } else { 0 };
        set_socket_option(self.fd, SOL_CAN_RAW, CAN_RAW_JOIN_FILTERS, &join_filters)
    }
}
impl AsRawFd for CANSocket {
fn as_raw_fd(&self) -> RawFd {
self.fd
}
}
impl FromRawFd for CANSocket {
unsafe fn from_raw_fd(fd: RawFd) -> CANSocket {
CANSocket { fd: fd }
}
}
impl IntoRawFd for CANSocket {
fn into_raw_fd(self) -> RawFd {
self.fd
}
}
impl Drop for CanSocket {
    fn drop(&mut self) {
        // best-effort close; errors cannot be propagated out of drop
        self.close().ok(); // ignore result
    }
}
/// CanFrame
///
/// Uses the same memory layout as the underlying kernel struct for performance
/// reasons (frames are read/written directly via `read(2)`/`write(2)`).
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct CanFrame {
    /// 32 bit CAN_ID + EFF/RTR/ERR flags
    _id: u32,

    /// data length. Bytes beyond are not valid
    _data_len: u8,

    /// padding
    _pad: u8,

    /// reserved
    _res0: u8,

    /// reserved
    _res1: u8,

    /// buffer for data
    _data: [u8; 8],
}
impl CanFrame {
    /// Construct a new frame from a raw `id` and up to 8 payload bytes.
    ///
    /// The extended-frame flag is set automatically for ids that do not fit
    /// into the 11 standard bits; `rtr` marks a remote transmission request
    /// and `err` an error frame.
    pub fn new(id: u32, data: &[u8], rtr: bool, err: bool) -> Result<CanFrame, ConstructionError> {
        let mut _id = id;

        if data.len() > 8 {
            return Err(ConstructionError::TooMuchData);
        }

        if id > EFF_MASK {
            return Err(ConstructionError::IDTooLarge);
        }

        // set EFF_FLAG on large message
        if id > SFF_MASK {
            _id |= EFF_FLAG;
        }

        if rtr {
            _id |= RTR_FLAG;
        }

        if err {
            _id |= ERR_FLAG;
        }

        // bulk-copy the payload instead of the previous byte-by-byte loop
        let mut full_data = [0; 8];
        full_data[..data.len()].copy_from_slice(data);

        Ok(CanFrame {
            _id: _id,
            _data_len: data.len() as u8,
            _pad: 0,
            _res0: 0,
            _res1: 0,
            _data: full_data,
        })
    }

    /// Return the actual CAN ID (without EFF/RTR/ERR flags)
    #[inline]
    pub fn id(&self) -> u32 {
        if self.is_extended() {
            self._id & EFF_MASK
        } else {
            self._id & SFF_MASK
        }
    }

    /// Return the error message
    #[inline]
    pub fn err(&self) -> u32 {
        self._id & ERR_MASK
    }

    /// Check if frame uses 29 bit extended frame format
    #[inline]
    pub fn is_extended(&self) -> bool {
        self._id & EFF_FLAG != 0
    }

    /// Check if frame is an error message
    #[inline]
    pub fn is_error(&self) -> bool {
        self._id & ERR_FLAG != 0
    }

    /// Check if frame is a remote transmission request
    #[inline]
    pub fn is_rtr(&self) -> bool {
        self._id & RTR_FLAG != 0
    }

    /// A slice into the actual data. Slice will always be <= 8 bytes in length
    #[inline]
    pub fn data(&self) -> &[u8] {
        &self._data[..(self._data_len as usize)]
    }

    /// Read error from message and transform it into a `CanError`.
    ///
    /// SocketCAN errors are indicated using the error bit and coded inside
    /// id and data payload. Calling `error()` converts these into usable
    /// `CanError` instances.
    ///
    /// If the frame is malformed, this may fail with a
    /// `CanErrorDecodingFailure`.
    #[inline]
    pub fn error(&self) -> Result<CanError, CanErrorDecodingFailure> {
        CanError::from_frame(self)
    }
}
/// Format a frame as `ID#PAYLOAD` hex (e.g. `1A#DEADBEEF`); the alternate
/// flag (`{:#X}`) inserts a space between payload bytes.
impl fmt::UpperHex for CanFrame {
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        write!(f, "{:X}#", self.id())?;

        let mut parts = self.data().iter().map(|v| format!("{:02X}", v));

        let sep = if f.alternate() { " " } else { "" };
        write!(f, "{}", parts.join(sep))
    }
}
/// CanFilter
///
/// Contains an internal id and mask. Packets are considered to be matched by
/// a filter if `received_id & mask == filter_id & mask` holds true.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct CanFilter {
    _id: u32,
    _mask: u32,
}

impl CanFilter {
    /// Construct a new CAN filter matching `id` under `mask`.
    pub fn new(id: u32, mask: u32) -> Result<CanFilter, ConstructionError> {
        let filter = CanFilter {
            _id: id,
            _mask: mask,
        };
        Ok(filter)
    }
}
NOTE: a rustfmt-reformatted copy of the module above follows below.
//! SocketCAN support.
//!
//! The Linux kernel supports using CAN-devices through a network-like API
//! (see https://www.kernel.org/doc/Documentation/networking/can.txt). This
//! crate allows easy access to this functionality without having to wrestle
//! libc calls.
//!
//! # An introduction to CAN
//!
//! The CAN bus was originally designed to allow microcontrollers inside a
//! vehicle to communicate over a single shared bus. Messages called
//! *frames* are multicast to all devices on the bus.
//!
//! Every frame consists of an ID and a payload of up to 8 bytes. If two
//! devices attempt to send a frame at the same time, the device with the
//! higher ID will notice the conflict, stop sending and reattempt to send its
//! frame in the next time slot. This means that the lower the ID, the higher
//! the priority. Since most devices have a limited buffer for outgoing frames,
//! a single device with a high priority (== low ID) can block communication
//! on that bus by sending messages too fast.
//!
//! The Linux socketcan subsystem makes the CAN bus available as a regular
//! networking device. Opening a network interface allows receiving all CAN
//! messages received on it. A device CAN be opened multiple times, every
//! client will receive all CAN frames simultaneously.
//!
//! Similarly, CAN frames can be sent to the bus by multiple clients
//! simultaneously as well.
//!
//! # Hardware and more information
//!
//! More information on CAN [can be found on Wikipedia](https://en.wikipedia.org/wiki/CAN_bus). When not running on
//! an embedded platform with already integrated CAN components,
//! [Thomas Fischl's USBtin](http://www.fischl.de/usbtin/) (see
//! [section 2.4](http://www.fischl.de/usbtin/#socketcan)) is one of many ways
//! to get started.
//!
//! # RawFd
//!
//! Raw access to the underlying file descriptor and construction through
//! is available through the `AsRawFd`, `IntoRawFd` and `FromRawFd`
//! implementations.
// clippy: do not warn about things like "SocketCAN" inside the docs
#![cfg_attr(feature = "cargo-clippy", allow(doc_markdown))]
extern crate byte_conv;
extern crate hex;
extern crate itertools;
extern crate libc;
extern crate netlink_rs;
extern crate nix;
extern crate try_from;
mod err;
pub use err::{CanError, CanErrorDecodingFailure};
pub mod dump;
mod nl;
mod util;
#[cfg(test)]
mod tests;
use libc::{c_int, c_short, c_void, c_uint, c_ulong, socket, SOCK_RAW, close, bind, sockaddr, read,
write, SOL_SOCKET, SO_RCVTIMEO, timespec, timeval, EINPROGRESS, SO_SNDTIMEO, time_t,
suseconds_t, fcntl, F_SETFL, O_NONBLOCK};
use itertools::Itertools;
use nix::net::if_::if_nametoindex;
pub use nl::CanInterface;
use std::{error, fmt, io, time};
use std::mem::{size_of, uninitialized};
use std::os::unix::io::{AsRawFd, FromRawFd, IntoRawFd, RawFd};
use util::{set_socket_option, set_socket_option_mult};
/// Check an error return value for timeouts.
///
/// Due to the fact that timeouts are reported as errors, calling `read_frame`
/// on a socket with a timeout that does not receive a frame in time will
/// result in an error being returned. This trait adds a `should_retry` method
/// to `Error` and `Result` to check for this condition.
pub trait ShouldRetry {
    /// Check for timeout
    ///
    /// If `true`, the error is probably due to a timeout.
    fn should_retry(&self) -> bool;
}

impl ShouldRetry for io::Error {
    fn should_retry(&self) -> bool {
        // Timeouts surface as EAGAIN, EWOULDBLOCK or EINPROGRESS. The stdlib
        // already maps the first two to `WouldBlock`; EINPROGRESS arrives as
        // `Other` and must be recognised by its raw OS error code.
        match self.kind() {
            io::ErrorKind::WouldBlock => true,
            io::ErrorKind::Other => self.raw_os_error() == Some(EINPROGRESS),
            _ => false,
        }
    }
}

impl<E: fmt::Debug> ShouldRetry for io::Result<E> {
    fn should_retry(&self) -> bool {
        // Delegate to the contained error, if there is one.
        match *self {
            Err(ref e) => e.should_retry(),
            Ok(_) => false,
        }
    }
}
// constants stolen from C headers
const AF_CAN: c_int = 29;
const PF_CAN: c_int = 29;
const CAN_RAW: c_int = 1;
const SOL_CAN_BASE: c_int = 100;
const SOL_CAN_RAW: c_int = SOL_CAN_BASE + CAN_RAW;
const CAN_RAW_FILTER: c_int = 1;
const CAN_RAW_ERR_FILTER: c_int = 2;
const CAN_RAW_LOOPBACK: c_int = 3;
const CAN_RAW_RECV_OWN_MSGS: c_int = 4;
// unused:
// const CAN_RAW_FD_FRAMES: c_int = 5;
const CAN_RAW_JOIN_FILTERS: c_int = 6;
// get timestamp in a struct timeval (us accuracy)
// const SIOCGSTAMP: c_int = 0x8906;
// get timestamp in a struct timespec (ns accuracy)
const SIOCGSTAMPNS: c_int = 0x8907;
/// if set, indicate 29 bit extended format
pub const EFF_FLAG: u32 = 0x80000000;
/// remote transmission request flag
pub const RTR_FLAG: u32 = 0x40000000;
/// error flag
pub const ERR_FLAG: u32 = 0x20000000;
/// valid bits in standard frame id
pub const SFF_MASK: u32 = 0x000007ff;
/// valid bits in extended frame id
pub const EFF_MASK: u32 = 0x1fffffff;
/// valid bits in error frame
pub const ERR_MASK: u32 = 0x1fffffff;
/// an error mask that will cause SocketCAN to report all errors
pub const ERR_MASK_ALL: u32 = ERR_MASK;
/// an error mask that will cause SocketCAN to silently drop all errors
pub const ERR_MASK_NONE: u32 = 0;
fn c_timeval_new(t: time::Duration) -> timeval {
timeval {
tv_sec: t.as_secs() as time_t,
tv_usec: (t.subsec_nanos() / 1000) as suseconds_t,
}
}
/// CAN socket address passed to `bind(2)` — assumed to mirror the kernel's
/// `struct sockaddr_can` layout; TODO(review): confirm against linux/can.h.
#[derive(Debug)]
#[repr(C)]
struct CanAddr {
    _af_can: c_short, // address family; set to AF_CAN in `open_if`
    if_index: c_int,  // interface index
    rx_id: u32,
    tx_id: u32,
}
#[derive(Debug)]
/// Errors opening socket
pub enum CanSocketOpenError {
/// Device could not be found
LookupError(nix::Error),
/// System error while trying to look up device name
IOError(io::Error),
}
impl fmt::Display for CanSocketOpenError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
CanSocketOpenError::LookupError(ref e) => write!(f, "CAN Device not found: {}", e),
CanSocketOpenError::IOError(ref e) => write!(f, "IO: {}", e),
}
}
}
impl error::Error for CanSocketOpenError {
fn description(&self) -> &str {
match *self {
CanSocketOpenError::LookupError(_) => "can device not found",
CanSocketOpenError::IOError(ref e) => e.description(),
}
}
fn cause(&self) -> Option<&error::Error> {
match *self {
CanSocketOpenError::LookupError(ref e) => Some(e),
CanSocketOpenError::IOError(ref e) => Some(e),
}
}
}
#[derive(Debug, Copy, Clone)]
/// Error that occurs when creating CAN packets
pub enum ConstructionError {
    /// CAN ID was outside the range of valid IDs (> EFF_MASK)
    IDTooLarge,
    /// More than 8 Bytes of payload data were passed in
    TooMuchData,
}
impl fmt::Display for ConstructionError {
    /// Human-readable description of the construction failure.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let msg = match *self {
            ConstructionError::IDTooLarge => "CAN ID too large",
            ConstructionError::TooMuchData => "Payload is larger than CAN maximum of 8 bytes",
        };
        write!(f, "{}", msg)
    }
}
impl error::Error for ConstructionError {
    // NOTE(review): `description` is deprecated; kept for the old toolchain
    // this file targets.
    fn description(&self) -> &str {
        match *self {
            ConstructionError::IDTooLarge => "can id too large",
            ConstructionError::TooMuchData => "too much data",
        }
    }
}
// Interface-name lookup failures (nix) map to the LookupError variant.
impl From<nix::Error> for CanSocketOpenError {
    fn from(e: nix::Error) -> CanSocketOpenError {
        CanSocketOpenError::LookupError(e)
    }
}
// OS-level socket/bind failures map to the IOError variant.
impl From<io::Error> for CanSocketOpenError {
    fn from(e: io::Error) -> CanSocketOpenError {
        CanSocketOpenError::IOError(e)
    }
}
/// A socket for a CAN device.
///
/// Will be closed upon deallocation. To close manually, use std::drop::Drop.
/// Internally this is just a wrapped file-descriptor.
#[derive(Debug)]
pub struct CanSocket {
    fd: c_int, // raw file descriptor, owned and closed by this struct
}
impl CanSocket {
    /// Open a named CAN device.
    ///
    /// Usually the more common case, opens a socket can device by name, such
    /// as "vcan0" or "socan0".
    pub fn open(ifname: &str) -> Result<CanSocket, CanSocketOpenError> {
        // resolve the interface name to a kernel interface index
        let if_index = if_nametoindex(ifname)?;
        CanSocket::open_if(if_index)
    }

    /// Open CAN device by interface number.
    ///
    /// Opens a CAN device by kernel interface number.
    pub fn open_if(if_index: c_uint) -> Result<CanSocket, CanSocketOpenError> {
        let addr = CanAddr {
            _af_can: AF_CAN as c_short,
            if_index: if_index as c_int,
            rx_id: 0, // unused for raw CAN sockets
            tx_id: 0, // unused for raw CAN sockets
        };

        // open a raw CAN socket
        let sock_fd;
        unsafe {
            sock_fd = socket(PF_CAN, SOCK_RAW, CAN_RAW);
        }
        if sock_fd == -1 {
            return Err(CanSocketOpenError::from(io::Error::last_os_error()));
        }

        // bind the socket to the interface
        let bind_rv;
        unsafe {
            let sockaddr_ptr = &addr as *const CanAddr;
            bind_rv = bind(sock_fd,
                           sockaddr_ptr as *const sockaddr,
                           size_of::<CanAddr>() as u32);
        }

        // on bind failure close the fd so it does not leak
        if bind_rv == -1 {
            let e = io::Error::last_os_error();
            unsafe {
                close(sock_fd);
            }
            return Err(CanSocketOpenError::from(e));
        }

        Ok(CanSocket { fd: sock_fd })
    }

    /// Close the underlying file descriptor.
    ///
    /// Also called from `Drop`, where the result is ignored.
    fn close(&mut self) -> io::Result<()> {
        unsafe {
            let rv = close(self.fd);
            // BUG FIX: this previously checked `rv != -1`, which reported an
            // error on *success* and silently swallowed real failures.
            // close(2) signals failure by returning -1.
            if rv == -1 {
                return Err(io::Error::last_os_error());
            }
        }
        Ok(())
    }

    /// Change socket to non-blocking mode
    pub fn set_nonblocking(&self) -> io::Result<()> {
        // NOTE(review): F_SETFL with only O_NONBLOCK overwrites any other
        // file-status flags; an F_GETFL read-modify-write would be safer —
        // confirm no other flags are relied upon before changing.
        let rv = unsafe { fcntl(self.fd, F_SETFL, O_NONBLOCK) };
        if rv != 0 {
            return Err(io::Error::last_os_error());
        }
        Ok(())
    }

    /// Sets the read timeout on the socket
    ///
    /// For convenience, the result value can be checked using
    /// `ShouldRetry::should_retry` when a timeout is set.
    pub fn set_read_timeout(&self, duration: time::Duration) -> io::Result<()> {
        set_socket_option(self.fd, SOL_SOCKET, SO_RCVTIMEO, &c_timeval_new(duration))
    }

    /// Sets the write timeout on the socket
    pub fn set_write_timeout(&self, duration: time::Duration) -> io::Result<()> {
        set_socket_option(self.fd, SOL_SOCKET, SO_SNDTIMEO, &c_timeval_new(duration))
    }

    /// Blocking read a single can frame.
    pub fn read_frame(&self) -> io::Result<CanFrame> {
        let mut frame = CanFrame {
            _id: 0,
            _data_len: 0,
            _pad: 0,
            _res0: 0,
            _res1: 0,
            _data: [0; 8],
        };

        let read_rv = unsafe {
            let frame_ptr = &mut frame as *mut CanFrame;
            read(self.fd, frame_ptr as *mut c_void, size_of::<CanFrame>())
        };

        // the kernel delivers whole frames; anything else is an error
        if read_rv as usize != size_of::<CanFrame>() {
            return Err(io::Error::last_os_error());
        }

        Ok(frame)
    }

    /// Blocking read a single can frame with timestamp
    ///
    /// Note that reading a frame and retrieving the timestamp requires two
    /// consecutive syscalls. To avoid race conditions, exclusive access
    /// to the socket is enforce through requiring a `mut &self`.
    pub fn read_frame_with_timestamp(&mut self) -> io::Result<(CanFrame, time::SystemTime)> {
        let frame = self.read_frame()?;

        let mut ts: timespec;
        let rval = unsafe {
            // we initialize tv calling ioctl, passing this responsibility on
            // NOTE(review): `uninitialized()` is deprecated/UB-prone; prefer
            // MaybeUninit or a zeroed struct when the toolchain allows.
            ts = uninitialized();
            libc::ioctl(self.fd, SIOCGSTAMPNS as c_ulong, &mut ts as *mut timespec)
        };

        if rval == -1 {
            return Err(io::Error::last_os_error());
        }

        Ok((frame, util::system_time_from_timespec(ts)))
    }

    /// Write a single can frame.
    ///
    /// Note that this function can fail with an `EAGAIN` error or similar.
    /// Use `write_frame_insist` if you need to be sure that the message got
    /// sent or failed.
    pub fn write_frame(&self, frame: &CanFrame) -> io::Result<()> {
        // not a mutable reference needed (see std::net::UdpSocket) for
        // a comparison
        // debug!("Sending: {:?}", frame);
        let write_rv = unsafe {
            let frame_ptr = frame as *const CanFrame;
            write(self.fd, frame_ptr as *const c_void, size_of::<CanFrame>())
        };

        if write_rv as usize != size_of::<CanFrame>() {
            return Err(io::Error::last_os_error());
        }

        Ok(())
    }

    /// Blocking write a single can frame, retrying until it gets sent
    /// successfully.
    pub fn write_frame_insist(&self, frame: &CanFrame) -> io::Result<()> {
        loop {
            match self.write_frame(frame) {
                Ok(v) => return Ok(v),
                Err(e) => {
                    // give up on any error that is not retryable (e.g. not
                    // EAGAIN / timeout)
                    if !e.should_retry() {
                        return Err(e);
                    }
                }
            }
        }
    }

    /// Sets filters on the socket.
    ///
    /// CAN packages received by SocketCAN are matched against these filters,
    /// only matching packets are returned by the interface.
    ///
    /// See `CanFilter` for details on how filtering works. By default, all
    /// single filter matching all incoming frames is installed.
    pub fn set_filters(&self, filters: &[CanFilter]) -> io::Result<()> {
        set_socket_option_mult(self.fd, SOL_CAN_RAW, CAN_RAW_FILTER, filters)
    }

    /// Sets the error mask on the socket.
    ///
    /// By default (`ERR_MASK_NONE`) no error conditions are reported as
    /// special error frames by the socket. Enabling error conditions by
    /// setting `ERR_MASK_ALL` or another non-empty error mask causes the
    /// socket to receive notification about the specified conditions.
    #[inline]
    pub fn set_error_mask(&self, mask: u32) -> io::Result<()> {
        set_socket_option(self.fd, SOL_CAN_RAW, CAN_RAW_ERR_FILTER, &mask)
    }

    /// Enable or disable loopback.
    ///
    /// By default, loopback is enabled, causing other applications that open
    /// the same CAN bus to see frames emitted by different applications on
    /// the same system.
    #[inline]
    pub fn set_loopback(&self, enabled: bool) -> io::Result<()> {
        let loopback: c_int = if enabled { 1 } else { 0 };
        set_socket_option(self.fd, SOL_CAN_RAW, CAN_RAW_LOOPBACK, &loopback)
    }

    /// Enable or disable receiving of own frames.
    ///
    /// When loopback is enabled, this settings controls if CAN frames sent
    /// are received back immediately by sender. Default is off.
    pub fn set_recv_own_msgs(&self, enabled: bool) -> io::Result<()> {
        let recv_own_msgs: c_int = if enabled { 1 } else { 0 };
        set_socket_option(self.fd, SOL_CAN_RAW, CAN_RAW_RECV_OWN_MSGS, &recv_own_msgs)
    }

    /// Enable or disable join filters.
    ///
    /// By default a frame is accepted if it matches any of the filters set
    /// with `set_filters`. If join filters is enabled, a frame has to match
    /// _all_ filters to be accepted.
    pub fn set_join_filters(&self, enabled: bool) -> io::Result<()> {
        let join_filters: c_int = if enabled { 1 } else { 0 };
        set_socket_option(self.fd, SOL_CAN_RAW, CAN_RAW_JOIN_FILTERS, &join_filters)
    }
}
impl AsRawFd for CANSocket {
fn as_raw_fd(&self) -> RawFd {
self.fd
}
}
impl FromRawFd for CANSocket {
unsafe fn from_raw_fd(fd: RawFd) -> CANSocket {
CANSocket { fd: fd }
}
}
impl IntoRawFd for CANSocket {
fn into_raw_fd(self) -> RawFd {
self.fd
}
}
impl Drop for CanSocket {
    fn drop(&mut self) {
        // Errors cannot be surfaced from drop; best-effort close.
        self.close().ok(); // ignore result
    }
}
/// CanFrame
///
/// Uses the same memory layout as the underlying kernel struct (`can_frame`)
/// for performance reasons: it is read/written directly via read(2)/write(2).
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct CanFrame {
    /// 32 bit CAN_ID + EFF/RTR/ERR flags
    _id: u32,
    /// data length. Bytes beyond are not valid
    _data_len: u8,
    /// padding
    _pad: u8,
    /// reserved
    _res0: u8,
    /// reserved
    _res1: u8,
    /// buffer for data
    _data: [u8; 8],
}
impl CanFrame {
    /// Build a new frame from an id and payload.
    ///
    /// `rtr` sets the remote-transmission-request flag, `err` the error flag.
    /// Ids above `SFF_MASK` are automatically marked as 29-bit extended.
    ///
    /// Errors with `IDTooLarge` when `id > EFF_MASK` and `TooMuchData` when
    /// the payload exceeds 8 bytes.
    pub fn new(id: u32, data: &[u8], rtr: bool, err: bool) -> Result<CanFrame, ConstructionError> {
        let mut _id = id;

        if data.len() > 8 {
            return Err(ConstructionError::TooMuchData);
        }

        if id > EFF_MASK {
            return Err(ConstructionError::IDTooLarge);
        }

        // set EFF_FLAG on large message
        if id > SFF_MASK {
            _id |= EFF_FLAG;
        }

        if rtr {
            _id |= RTR_FLAG;
        }

        if err {
            _id |= ERR_FLAG;
        }

        // copy the payload into the fixed-size frame buffer
        // (replaces the previous manual element-by-element loop)
        let mut full_data = [0; 8];
        full_data[..data.len()].copy_from_slice(data);

        Ok(CanFrame {
            _id: _id,
            _data_len: data.len() as u8,
            _pad: 0,
            _res0: 0,
            _res1: 0,
            _data: full_data,
        })
    }

    /// Return the actual CAN ID (without EFF/RTR/ERR flags)
    #[inline]
    pub fn id(&self) -> u32 {
        if self.is_extended() {
            self._id & EFF_MASK
        } else {
            self._id & SFF_MASK
        }
    }

    /// Return the error message
    #[inline]
    pub fn err(&self) -> u32 {
        self._id & ERR_MASK
    }

    /// Check if frame uses 29 bit extended frame format
    #[inline]
    pub fn is_extended(&self) -> bool {
        self._id & EFF_FLAG != 0
    }

    /// Check if frame is an error message
    #[inline]
    pub fn is_error(&self) -> bool {
        self._id & ERR_FLAG != 0
    }

    /// Check if frame is a remote transmission request
    #[inline]
    pub fn is_rtr(&self) -> bool {
        self._id & RTR_FLAG != 0
    }

    /// A slice into the actual data. Slice will always be <= 8 bytes in length
    #[inline]
    pub fn data(&self) -> &[u8] {
        &self._data[..(self._data_len as usize)]
    }

    /// Read error from message and transform it into a `CanError`.
    ///
    /// SocketCAN errors are indicated using the error bit and coded inside
    /// id and data payload. Call `error()` converts these into usable
    /// `CanError` instances.
    ///
    /// If the frame is malformed, this may fail with a
    /// `CanErrorDecodingFailure`.
    #[inline]
    pub fn error(&self) -> Result<CanError, CanErrorDecodingFailure> {
        CanError::from_frame(self)
    }
}
impl fmt::UpperHex for CanFrame {
    /// Format as `ID#DATA`, e.g. `1A#DEADBEEF`; the alternate flag (`{:#X}`)
    /// inserts spaces between payload bytes.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        write!(f, "{:X}#", self.id())?;
        let sep = if f.alternate() { " " } else { "" };
        for (i, byte) in self.data().iter().enumerate() {
            if i > 0 {
                write!(f, "{}", sep)?;
            }
            write!(f, "{:02X}", byte)?;
        }
        Ok(())
    }
}
/// CanFilter
///
/// Contains an internal id and mask. Packets are considered to be matched by
/// a filter if `received_id & mask == filter_id & mask` holds true.
/// `#[repr(C)]` because it is passed verbatim to setsockopt(2).
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct CanFilter {
    _id: u32,
    _mask: u32,
}
impl CanFilter {
    /// Construct a new CAN filter.
    ///
    /// NOTE(review): this currently never fails; the `Result` return type is
    /// kept so invalid id/mask combinations can be rejected later without
    /// breaking callers.
    pub fn new(id: u32, mask: u32) -> Result<CanFilter, ConstructionError> {
        Ok(CanFilter {
            _id: id,
            _mask: mask,
        })
    }
}
|
#![crate_type = "lib"]
#![feature(ip_addr)]
#[allow(dead_code)]
use std::fmt;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
// A network
// A network: either an IPv4 or an IPv6 CIDR block.
#[derive(Debug)]
pub enum IpNetwork {
    V4(Ipv4Network),
    V6(Ipv6Network),
}
/// An IPv4 CIDR block: an address plus a prefix length (0..=32).
pub struct Ipv4Network {
    addr: Ipv4Addr,
    prefix: u8,
}
/// An IPv6 CIDR block: an address plus a prefix length (0..=128).
pub struct Ipv6Network {
    addr: Ipv6Addr,
    prefix: u8,
}
impl Ipv4Network {
    /// Create a network from an address and prefix length (0..=32).
    pub fn new(addr: Ipv4Addr, prefix: u8) -> Ipv4Network {
        Ipv4Network { addr: addr, prefix: prefix }
    }

    /// Borrow the network's address.
    pub fn ip(&self) -> &Ipv4Addr {
        &(self.addr)
    }

    /// The prefix length in bits.
    pub fn prefix(&self) -> u8 {
        self.prefix
    }

    /// The netmask as a host-order u32 (top `prefix` bits set).
    ///
    /// BUG FIX: the previous `!(0xffffffff >> prefix)` shifted a u32 by 32
    /// for a /32 network, which panics in debug builds (shift overflow).
    /// Handle prefix == 0 explicitly and shift left instead, so every prefix
    /// in 0..=32 is valid.
    pub fn mask_int(&self) -> u32 {
        if self.prefix == 0 {
            0
        } else {
            !0u32 << (32 - self.prefix as u32)
        }
    }

    /// The netmask as an address, e.g. 255.255.255.248 for /29.
    pub fn mask(&self) -> Ipv4Addr {
        let mask = self.mask_int();
        Ipv4Addr::from(mask)
    }

    /// The address as a host-order u32.
    pub fn network_int(&self) -> u32 {
        u32::from(self.addr)
    }

    /// The network address.
    pub fn network(&self) -> Ipv4Addr {
        self.addr
    }
}
impl Ipv6Network {
    /// Build a network from an address plus prefix length.
    pub fn new(addr: Ipv6Addr, prefix: u8) -> Ipv6Network {
        Ipv6Network { addr: addr, prefix: prefix }
    }

    /// Borrow the network's address.
    pub fn ip(&self) -> &Ipv6Addr {
        &self.addr
    }

    /// Length of the network prefix in bits.
    pub fn prefix(&self) -> u8 {
        self.prefix
    }
}
impl IpNetwork {
    /// Wrap an address + prefix in the matching version-specific network.
    pub fn new(ip: IpAddr, prefix: u8) -> IpNetwork {
        match ip {
            IpAddr::V4(addr) => IpNetwork::V4(Ipv4Network::new(addr, prefix)),
            IpAddr::V6(addr) => IpNetwork::V6(Ipv6Network::new(addr, prefix)),
        }
    }

    /// The network's address, version-erased into `IpAddr`.
    pub fn ip(&self) -> IpAddr {
        match *self {
            IpNetwork::V4(ref net) => IpAddr::V4(*net.ip()),
            IpNetwork::V6(ref net) => IpAddr::V6(*net.ip()),
        }
    }

    /// The prefix length, independent of IP version.
    pub fn prefix(&self) -> u8 {
        match *self {
            IpNetwork::V4(ref net) => net.prefix(),
            IpNetwork::V6(ref net) => net.prefix(),
        }
    }
}
impl fmt::Debug for Ipv4Network {
    /// Render in CIDR notation, e.g. `10.0.0.0/8`.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "{}/{}", self.addr, self.prefix)
    }
}
impl fmt::Debug for Ipv6Network {
    /// Render in CIDR notation, e.g. `::1/128`.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "{}/{}", self.addr, self.prefix)
    }
}
#[cfg(test)]
mod test {
    use std::net::{Ipv4Addr, Ipv6Addr};
    use super::*;

    // constructors store the prefix as given
    #[test]
    fn create_v4() {
        let cidr = Ipv4Network::new(Ipv4Addr::new(77, 88, 21, 11), 24);
        assert_eq!(cidr.prefix(), 24);
    }

    #[test]
    fn create_v6() {
        let cidr = Ipv6Network::new(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1), 24);
        assert_eq!(cidr.prefix(), 24);
    }

    // /29 -> top 29 bits set = 0xfffffff8
    #[test]
    fn mask_int_v4() {
        let cidr = Ipv4Network::new(Ipv4Addr::new(74, 125, 227, 0), 29);
        assert_eq!(cidr.mask_int(), 4294967288);
    }

    #[test]
    fn mask_v4() {
        let cidr = Ipv4Network::new(Ipv4Addr::new(74, 125, 227, 0), 29);
        assert_eq!(cidr.mask(), Ipv4Addr::new(255, 255, 255, 248));
    }

    // 74.125.227.0 as a big-endian u32
    #[test]
    fn network_int_v4() {
        let cidr = Ipv4Network::new(Ipv4Addr::new(74, 125, 227, 0), 25);
        assert_eq!(cidr.network_int(), 1249764096);
    }

    #[test]
    fn network_v4() {
        let cidr = Ipv4Network::new(Ipv4Addr::new(74, 125, 227, 0), 25);
        assert_eq!(cidr.network(), Ipv4Addr::new(74, 125, 227, 0));
    }
}
Simplify API
#![crate_type = "lib"]
#![feature(ip_addr)]
#[allow(dead_code)]
use std::fmt;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
// A network
// A network: either an IPv4 or an IPv6 CIDR block.
#[derive(Debug)]
pub enum IpNetwork {
    V4(Ipv4Network),
    V6(Ipv6Network),
}
/// An IPv4 CIDR block: an address plus a prefix length (0..=32).
pub struct Ipv4Network {
    addr: Ipv4Addr,
    prefix: u8,
}
/// An IPv6 CIDR block: an address plus a prefix length (0..=128).
pub struct Ipv6Network {
    addr: Ipv6Addr,
    prefix: u8,
}
impl Ipv4Network {
    /// Create a network from an address and prefix length (0..=32).
    pub fn new(addr: Ipv4Addr, prefix: u8) -> Ipv4Network {
        Ipv4Network { addr: addr, prefix: prefix }
    }

    /// Borrow the network's address.
    pub fn ip(&self) -> &Ipv4Addr {
        &(self.addr)
    }

    /// The prefix length in bits.
    pub fn prefix(&self) -> u8 {
        self.prefix
    }

    /// The netmask as a host-order u32 (top `prefix` bits set).
    ///
    /// BUG FIX: the previous `!(0xffffffff >> prefix)` shifted a u32 by 32
    /// for a /32 network, which panics in debug builds (shift overflow).
    /// Handle prefix == 0 explicitly and shift left instead, so every prefix
    /// in 0..=32 is valid.
    pub fn mask_int(&self) -> u32 {
        if self.prefix == 0 {
            0
        } else {
            !0u32 << (32 - self.prefix as u32)
        }
    }

    /// The netmask as both an address and a host-order u32.
    pub fn mask(&self) -> (Ipv4Addr, u32) {
        // CONSISTENCY: reuse mask_int so the two representations (and the
        // overflow fix) cannot diverge; previously the shift was duplicated
        // here with the same /32 bug.
        let mask = self.mask_int();
        (Ipv4Addr::from(mask), mask)
    }

    /// The network address as both an address and a host-order u32.
    pub fn network(&self) -> (Ipv4Addr, u32) {
        (self.addr, u32::from(self.addr))
    }
}
impl Ipv6Network {
    /// Build a network from an address plus prefix length.
    pub fn new(addr: Ipv6Addr, prefix: u8) -> Ipv6Network {
        Ipv6Network { addr: addr, prefix: prefix }
    }

    /// Borrow the network's address.
    pub fn ip(&self) -> &Ipv6Addr {
        &self.addr
    }

    /// Length of the network prefix in bits.
    pub fn prefix(&self) -> u8 {
        self.prefix
    }
}
impl IpNetwork {
    /// Wrap an address + prefix in the matching version-specific network.
    pub fn new(ip: IpAddr, prefix: u8) -> IpNetwork {
        match ip {
            IpAddr::V4(addr) => IpNetwork::V4(Ipv4Network::new(addr, prefix)),
            IpAddr::V6(addr) => IpNetwork::V6(Ipv6Network::new(addr, prefix)),
        }
    }

    /// The network's address, version-erased into `IpAddr`.
    pub fn ip(&self) -> IpAddr {
        match *self {
            IpNetwork::V4(ref net) => IpAddr::V4(*net.ip()),
            IpNetwork::V6(ref net) => IpAddr::V6(*net.ip()),
        }
    }

    /// The prefix length, independent of IP version.
    pub fn prefix(&self) -> u8 {
        match *self {
            IpNetwork::V4(ref net) => net.prefix(),
            IpNetwork::V6(ref net) => net.prefix(),
        }
    }
}
impl fmt::Debug for Ipv4Network {
    /// Render in CIDR notation, e.g. `10.0.0.0/8`.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "{}/{}", self.addr, self.prefix)
    }
}
impl fmt::Debug for Ipv6Network {
    /// Render in CIDR notation, e.g. `::1/128`.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "{}/{}", self.addr, self.prefix)
    }
}
#[cfg(test)]
mod test {
    use std::net::{Ipv4Addr, Ipv6Addr};
    use super::*;

    // constructors store the prefix as given
    #[test]
    fn create_v4() {
        let cidr = Ipv4Network::new(Ipv4Addr::new(77, 88, 21, 11), 24);
        assert_eq!(cidr.prefix(), 24);
    }

    #[test]
    fn create_v6() {
        let cidr = Ipv6Network::new(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1), 24);
        assert_eq!(cidr.prefix(), 24);
    }

    // /29 -> mask 255.255.255.248 == 0xfffffff8
    #[test]
    fn mask_v4() {
        let cidr = Ipv4Network::new(Ipv4Addr::new(74, 125, 227, 0), 29);
        let (ip, int) = cidr.mask();
        assert_eq!(ip, Ipv4Addr::new(255, 255, 255, 248));
        assert_eq!(int, 4294967288);
    }

    // network() returns both representations of the stored address
    #[test]
    fn network_v4() {
        let cidr = Ipv4Network::new(Ipv4Addr::new(74, 125, 227, 0), 25);
        let (ip, int) = cidr.network();
        assert_eq!(ip, Ipv4Addr::new(74, 125, 227, 0));
        assert_eq!(int, 1249764096);
    }
}
|
#![feature(core,io,net)]
extern crate rustc_serialize;
extern crate hyper;
mod error;
mod errorcode;
mod response;
use std::io::Read;
use std::collections::HashSet;
use std::net::IpAddr;
pub use error::RecaptchaError;
pub use errorcode::RecaptchaErrorCode;
use rustc_serialize::json;
use response::RecaptchaResponse;
/// Verify a recaptcha user response
pub fn verify(key: &str, response: &str, user_ip: Option<&IpAddr>) -> Result<(), RecaptchaError> {
    use hyper::{Client, Url};

    // Render the caller's IP (if any) as an owned string, then re-borrow it
    // for the query-pair list below.
    let user_ip = user_ip.map(ToString::to_string);
    let user_ip = user_ip.as_ref().map(|s| &**s);

    let mut query = vec![("secret", key), ("response", response)];
    if let Some(ip) = user_ip {
        query.push(("remoteip", &ip));
    }

    let mut url = Url::parse("https://www.google.com/recaptcha/api/siteverify").unwrap();
    url.set_query_from_pairs(query.into_iter());

    let mut client = Client::new();
    let mut http_response = try!(client.get(url).send());

    // Read the whole body before decoding.
    let mut body = String::new();
    try!(http_response.read_to_string(&mut body));

    let recaptcha_response = try!(json::decode::<RecaptchaResponse>(&body));

    // Success wins regardless of codes; failure surfaces the codes, or an
    // empty set when the API supplied none.
    match (recaptcha_response.success, recaptcha_response.error_codes) {
        (true, _) => Ok(()),
        (false, Some(errors)) => Err(RecaptchaError::Codes(errors)),
        (false, _) => Err(RecaptchaError::Codes(HashSet::new())),
    }
}
// NOTE: this is a live-network test; it calls the real Google endpoint and
// expects the API to reject empty credentials with both error codes.
#[test]
fn test_invalid_secret_missing_response() {
    use RecaptchaError::*;
    use RecaptchaErrorCode::*;
    let resp = verify("", "", None);
    match resp {
        Ok(()) => panic!("unexpected response: Ok(())"),
        Err(Codes(ref errors)) => {
            assert!(errors.contains(&InvalidSecret));
            assert!(errors.contains(&MissingResponse));
        }
        Err(e) => panic!("unexpected error: {}", e),
    };
    println!("{:?}", resp);
}
Update features
#![feature(ip_addr)]
extern crate rustc_serialize;
extern crate hyper;
mod error;
mod errorcode;
mod response;
use std::io::Read;
use std::collections::HashSet;
use std::net::IpAddr;
pub use error::RecaptchaError;
pub use errorcode::RecaptchaErrorCode;
use rustc_serialize::json;
use response::RecaptchaResponse;
/// Verify a recaptcha user response
pub fn verify(key: &str, response: &str, user_ip: Option<&IpAddr>) -> Result<(), RecaptchaError> {
    use hyper::{Client, Url};

    // Stringify the optional client IP and re-borrow it so the query vector
    // can hold plain &str pairs.
    let user_ip = user_ip.map(ToString::to_string);
    let user_ip = user_ip.as_ref().map(|s| &**s);

    let mut query = vec![("secret", key), ("response", response)];
    if let Some(remote) = user_ip {
        query.push(("remoteip", &remote));
    }

    let mut url = Url::parse("https://www.google.com/recaptcha/api/siteverify").unwrap();
    url.set_query_from_pairs(query.into_iter());

    let mut client = Client::new();
    let mut api_response = try!(client.get(url).send());

    // Buffer the full response body, then decode it as JSON.
    let mut body = String::new();
    try!(api_response.read_to_string(&mut body));
    let recaptcha_response = try!(json::decode::<RecaptchaResponse>(&body));

    // Map the (success, codes) pair onto our Result; a failure without
    // codes becomes an empty code set.
    match (recaptcha_response.success, recaptcha_response.error_codes) {
        (true, _) => Ok(()),
        (false, Some(errors)) => Err(RecaptchaError::Codes(errors)),
        (false, _) => Err(RecaptchaError::Codes(HashSet::new())),
    }
}
// NOTE: this is a live-network test; it calls the real Google endpoint and
// expects the API to reject empty credentials with both error codes.
#[test]
fn test_invalid_secret_missing_response() {
    use RecaptchaError::*;
    use RecaptchaErrorCode::*;
    let resp = verify("", "", None);
    match resp {
        Ok(()) => panic!("unexpected response: Ok(())"),
        Err(Codes(ref errors)) => {
            assert!(errors.contains(&InvalidSecret));
            assert!(errors.contains(&MissingResponse));
        }
        Err(e) => panic!("unexpected error: {}", e),
    };
    println!("{:?}", resp);
}
|
use std::{
cmp::Reverse,
collections::{BTreeMap, BTreeSet, BinaryHeap},
sync::Arc,
time::SystemTime,
};
use maplit::btreeset;
use ruma::{
events::EventType,
identifiers::{EventId, RoomId, RoomVersionId},
};
mod error;
pub mod event_auth;
pub mod room_version;
mod state_event;
mod state_store;
pub use error::{Error, Result};
pub use event_auth::{auth_check, auth_types_for_event};
pub use state_event::Event;
pub use state_store::StateStore;
// We want to yield to the reactor occasionally during state res when dealing
// with large data sets, so that we don't exhaust the reactor. This is done by
// yielding to reactor during loops every N iterations.
const _YIELD_AFTER_ITERATIONS: usize = 100;

/// A mapping of event type and state_key to some value `T`, usually an `EventId`.
pub type StateMap<T> = BTreeMap<(EventType, Option<String>), T>;

/// A mapping of `EventId` to `T`, usually a `ServerPdu`.
pub type EventMap<T> = BTreeMap<EventId, T>;

/// Zero-sized namespace type; all state-resolution entry points are
/// associated functions on this struct.
#[derive(Default)]
pub struct StateResolution;
impl StateResolution {
/// Check if the `incoming_event` can be included in the given `current_state`.
///
/// This will authenticate the event against the current state of the room. It
/// is important that the `current_state` argument is accurate and complete.
pub fn apply_event<E: Event>(
    room_id: &RoomId,
    room_version: &RoomVersionId,
    incoming_event: Arc<E>,
    current_state: &StateMap<EventId>,
    event_map: &EventMap<Arc<E>>,
) -> Result<bool> {
    tracing::info!("Applying a single event, state resolution starting");
    let ev = incoming_event;

    // Only the first prev_event is considered, and only if it is cached.
    let prev_event = if let Some(id) = ev.prev_events().first() {
        event_map.get(id).map(Arc::clone)
    } else {
        None
    };

    // Gather, from the current state, the events this event's auth rules
    // depend on (power levels, membership, etc.).
    let mut auth_events = StateMap::new();
    for key in
        event_auth::auth_types_for_event(&ev.kind(), &ev.sender(), ev.state_key(), ev.content())
    {
        if let Some(ev_id) = current_state.get(&key) {
            if let Ok(event) = StateResolution::get_or_load_event(room_id, ev_id, event_map) {
                // TODO synapse checks `rejected_reason` is None here
                auth_events.insert(key.clone(), event);
            }
        }
    }

    event_auth::auth_check(room_version, &ev, prev_event, &auth_events, None)
}
/// Resolve sets of state events as they come in. Internally `StateResolution` builds a graph
/// and an auth chain to allow for state conflict resolution.
///
/// ## Arguments
///
/// * `state_sets` - The incoming state to resolve. Each `StateMap` represents a possible fork
/// in the state of a room.
///
/// * `auth_events` - The full recursive set of `auth_events` for each event in the `state_sets`.
///
/// * `event_map` - The `EventMap` acts as a local cache of state, any event that is not found
/// in the `event_map` will be fetched from the `StateStore` and cached in the `event_map`. There
/// is no state kept from separate `resolve` calls, although this could be a potential optimization
/// in the future.
///
/// It is up to the caller to check that the events returned from `StateStore::get_event` are
/// events for the correct room (synapse checks that all events are in the right room).
pub fn resolve<E: Event>(
    room_id: &RoomId,
    room_version: &RoomVersionId,
    state_sets: &[StateMap<EventId>],
    auth_events: Vec<Vec<EventId>>,
    event_map: &mut EventMap<Arc<E>>,
) -> Result<StateMap<EventId>> {
    tracing::info!("State resolution starting");

    // split non-conflicting and conflicting state
    let (clean, conflicting) = StateResolution::separate(&state_sets);

    tracing::info!("non conflicting {:?}", clean.len());

    // fast path: all forks agree, nothing to resolve
    if conflicting.is_empty() {
        tracing::info!("no conflicting state found");
        return Ok(clean);
    }

    tracing::info!("{} conflicting events", conflicting.len());

    // the set of auth events that are not common across server forks
    let mut auth_diff = StateResolution::get_auth_chain_diff(room_id, &auth_events)?;

    tracing::debug!("auth diff size {:?}", auth_diff);

    // add the auth_diff to conflicting now we have a full set of conflicting events
    auth_diff.extend(conflicting.values().cloned().flatten());
    // deduplicate via BTreeSet round-trip
    let mut all_conflicted = auth_diff
        .into_iter()
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect::<Vec<_>>();

    tracing::info!("full conflicted set is {} events", all_conflicted.len());

    // we used to check that all events are events from the correct room
    // this is now a check the caller of `resolve` must make.

    // synapse says `full_set = {eid for eid in full_conflicted_set if eid in event_map}`
    //
    // don't honor events we cannot "verify"
    all_conflicted.retain(|id| event_map.contains_key(id));

    // get only the control events with a state_key: "" or ban/kick event (sender != state_key)
    let control_events = all_conflicted
        .iter()
        .filter(|id| is_power_event_id(id, &event_map))
        .cloned()
        .collect::<Vec<_>>();

    // sort the control events based on power_level/clock/event_id and outgoing/incoming edges
    let mut sorted_control_levels = StateResolution::reverse_topological_power_sort(
        room_id,
        &control_events,
        event_map,
        &all_conflicted,
    );

    tracing::debug!("SRTD {:?}", sorted_control_levels);

    // sequentially auth check each control event.
    let resolved_control = StateResolution::iterative_auth_check(
        room_id,
        room_version,
        &sorted_control_levels,
        &clean,
        event_map,
    )?;

    tracing::debug!(
        "AUTHED {:?}",
        resolved_control
            .iter()
            .map(|(key, id)| (key, id.to_string()))
            .collect::<Vec<_>>()
    );

    // At this point the control_events have been resolved we now have to
    // sort the remaining events using the mainline of the resolved power level.
    sorted_control_levels.dedup();
    let deduped_power_ev = sorted_control_levels;

    // This removes the control events that passed auth and more importantly those that failed auth
    let events_to_resolve = all_conflicted
        .iter()
        .filter(|id| !deduped_power_ev.contains(id))
        .cloned()
        .collect::<Vec<_>>();

    tracing::debug!(
        "LEFT {:?}",
        events_to_resolve
            .iter()
            .map(ToString::to_string)
            .collect::<Vec<_>>()
    );

    // This "epochs" power level event
    let power_event = resolved_control.get(&(EventType::RoomPowerLevels, Some("".into())));

    tracing::debug!("PL {:?}", power_event);

    // order the remaining events relative to the power-level mainline
    let sorted_left_events =
        StateResolution::mainline_sort(room_id, &events_to_resolve, power_event, event_map);

    tracing::debug!(
        "SORTED LEFT {:?}",
        sorted_left_events
            .iter()
            .map(ToString::to_string)
            .collect::<Vec<_>>()
    );

    let mut resolved_state = StateResolution::iterative_auth_check(
        room_id,
        room_version,
        &sorted_left_events,
        &resolved_control, // The control events are added to the final resolved state
        event_map,
    )?;

    // add unconflicted state to the resolved state
    // We priorities the unconflicting state
    resolved_state.extend(clean);

    Ok(resolved_state)
}
/// Split the events that have no conflicts from those that are conflicting.
/// The return tuple looks like `(unconflicted, conflicted)`.
///
/// State is determined to be conflicting if for the given key (EventType, StateKey) there
/// is not exactly one eventId. This includes missing events, if one state_set includes an event
/// that none of the other have this is a conflicting event.
pub fn separate(
    state_sets: &[StateMap<EventId>],
) -> (StateMap<EventId>, StateMap<Vec<EventId>>) {
    use itertools::Itertools;

    // BUG FIX (log text): "seperating" -> "separating".
    tracing::info!(
        "separating {} sets of events into conflicted/unconflicted",
        state_sets.len()
    );

    let mut unconflicted_state = StateMap::new();
    let mut conflicted_state = StateMap::new();

    // NOTE(review): `dedup` only removes *consecutive* duplicates, so a key
    // present in more than one fork is visited more than once; the work is
    // idempotent (same inserts), just redundant — confirm before optimizing.
    for key in state_sets.iter().flat_map(|map| map.keys()).dedup() {
        // One entry per distinct value the forks hold for this key; `None`
        // marks a fork that is missing the key entirely.
        let mut event_ids = state_sets
            .iter()
            .map(|state_set| state_set.get(key))
            .dedup()
            .collect::<Vec<_>>();

        if event_ids.len() == 1 {
            if let Some(Some(id)) = event_ids.pop() {
                unconflicted_state.insert(key.clone(), id.clone());
            } else {
                // `key` came from one of the maps, so a single deduped entry
                // cannot be `None`. Previously a bare `panic!()`.
                unreachable!("a single deduped state entry cannot be None")
            }
        } else {
            conflicted_state.insert(
                key.clone(),
                event_ids.into_iter().flatten().cloned().collect::<Vec<_>>(),
            );
        }
    }

    (unconflicted_state, conflicted_state)
}
/// Returns a Vec of deduped EventIds that appear in some chains but not others.
///
/// The result is the union of all auth chains minus the events common to
/// *every* chain.
pub fn get_auth_chain_diff(
    _room_id: &RoomId,
    auth_event_ids: &[Vec<EventId>],
) -> Result<Vec<EventId>> {
    let mut chains = vec![];
    for ids in auth_event_ids {
        // TODO state store `auth_event_ids` returns self in the event ids list
        // when an event returns `auth_event_ids` self is not contained
        let chain = ids.iter().cloned().collect::<BTreeSet<_>>();
        chains.push(chain);
    }

    if let Some(first) = chains.first().cloned() {
        // BUG FIX: "common" must be the intersection of ALL chains. The
        // previous code intersected the first chain with the *union* of the
        // rest, which wrongly classified an event shared by only some of the
        // other chains as common and dropped it from the diff.
        let common = chains.iter().skip(1).fold(first, |common, chain| {
            common
                .intersection(chain)
                .cloned()
                .collect::<BTreeSet<_>>()
        });

        Ok(chains
            .iter()
            .flatten()
            .filter(|id| !common.contains(*id))
            .cloned()
            .collect::<BTreeSet<_>>()
            .into_iter()
            .collect())
    } else {
        Ok(vec![])
    }
}
/// Events are sorted from "earliest" to "latest". They are compared using
/// the negative power level (reverse topological ordering), the
/// origin server timestamp and incase of a tie the `EventId`s
/// are compared lexicographically.
///
/// The power level is negative because a higher power level is equated to an
/// earlier (further back in time) origin server timestamp.
pub fn reverse_topological_power_sort<E: Event>(
    room_id: &RoomId,
    events_to_sort: &[EventId],
    event_map: &mut EventMap<Arc<E>>,
    auth_diff: &[EventId],
) -> Vec<EventId> {
    tracing::debug!("reverse topological sort of power events");

    // Build the auth-chain graph restricted to events in `auth_diff`.
    let mut graph = BTreeMap::new();
    for (idx, event_id) in events_to_sort.iter().enumerate() {
        StateResolution::add_event_and_auth_chain_to_graph(
            room_id, &mut graph, event_id, event_map, auth_diff,
        );

        // We yield occasionally when we're working with large data sets to
        // ensure that we don't block the reactor loop for too long.
        if idx % _YIELD_AFTER_ITERATIONS == 0 {
            // yield clock.sleep(0)
        }
    }

    // this is used in the `key_fn` passed to the lexico_topo_sort fn
    let mut event_to_pl = BTreeMap::new();
    for (idx, event_id) in graph.keys().enumerate() {
        let pl = StateResolution::get_power_level_for_sender(room_id, &event_id, event_map);
        tracing::info!("{} power level {}", event_id.to_string(), pl);

        event_to_pl.insert(event_id.clone(), pl);

        // We yield occasionally when we're working with large data sets to
        // ensure that we don't block the reactor loop for too long.
        if idx % _YIELD_AFTER_ITERATIONS == 0 {
            // yield clock.sleep(0)
        }
    }

    StateResolution::lexicographical_topological_sort(&graph, |event_id| {
        // tracing::debug!("{:?}", event_map.get(event_id).unwrap().origin_server_ts());
        let ev = event_map.get(event_id).unwrap();
        let pl = event_to_pl.get(event_id).unwrap();

        tracing::debug!("{:?}", (-*pl, ev.origin_server_ts(), &ev.event_id()));

        // This return value is the key used for sorting events,
        // events are then sorted by power level, time,
        // and lexically by event_id.
        (-*pl, ev.origin_server_ts(), ev.event_id().clone())
    })
}
/// Sorts the event graph based on number of outgoing/incoming edges, where
/// `key_fn` is used as a tie breaker. The tie breaker happens based on
/// power level, age, and event_id.
pub fn lexicographical_topological_sort<F>(
    graph: &BTreeMap<EventId, Vec<EventId>>,
    key_fn: F,
) -> Vec<EventId>
where
    F: Fn(&EventId) -> (i64, SystemTime, EventId),
{
    tracing::info!("starting lexicographical topological sort");
    // NOTE: an event that has no incoming edges happened most recently,
    // and an event that has no outgoing edges happened least recently.

    // NOTE: this is basically Kahn's algorithm except we look at nodes with no
    // outgoing edges, c.f.
    // https://en.wikipedia.org/wiki/Topological_sorting#Kahn's_algorithm

    // TODO make the BTreeSet conversion cleaner ??
    // outdegree_map is an event referring to the events before it, the
    // more outdegree's the more recent the event.
    let mut outdegree_map: BTreeMap<EventId, BTreeSet<EventId>> = graph
        .iter()
        .map(|(k, v)| (k.clone(), v.iter().cloned().collect()))
        .collect();

    // The number of events that depend on the given event (the eventId key)
    let mut reverse_graph = BTreeMap::new();

    // Vec of nodes that have zero out degree, least recent events.
    let mut zero_outdegree = vec![];

    for (node, edges) in graph.iter() {
        if edges.is_empty() {
            // the `Reverse` is because rusts `BinaryHeap` sorts largest -> smallest we need
            // smallest -> largest
            zero_outdegree.push(Reverse((key_fn(node), node)));
        }

        reverse_graph.entry(node).or_insert(btreeset![]);
        for edge in edges {
            reverse_graph
                .entry(edge)
                .or_insert(btreeset![])
                .insert(node);
        }
    }

    let mut heap = BinaryHeap::from(zero_outdegree);

    // we remove the oldest node (most incoming edges) and check against all other
    let mut sorted = vec![];
    // destructure the `Reverse` and take the smallest `node` each time
    while let Some(Reverse((_, node))) = heap.pop() {
        let node: &EventId = node;
        for parent in reverse_graph.get(node).unwrap() {
            // the number of outgoing edges this node has
            let out = outdegree_map.get_mut(parent).unwrap();

            // only push on the heap once older events have been cleared
            out.remove(node);
            if out.is_empty() {
                heap.push(Reverse((key_fn(parent), parent)));
            }
        }

        // synapse yields we push then return the vec
        sorted.push(node.clone());
    }

    sorted
}
/// Find the power level for the sender of `event_id` or return a default value of zero.
///
/// Looks through the event's auth events for an `m.room.power_levels` event
/// and reads the sender's level (or the room's `users_default`) from it.
fn get_power_level_for_sender<E: Event>(
    room_id: &RoomId,
    event_id: &EventId,
    event_map: &mut EventMap<Arc<E>>,
) -> i64 {
    tracing::info!("fetch event ({}) senders power level", event_id.to_string());

    let event = StateResolution::get_or_load_event(room_id, event_id, event_map);
    let mut pl = None;

    // TODO store.auth_event_ids returns "self" with the event ids is this ok
    // event.auth_event_ids does not include its own event id ?
    for aid in event
        .as_ref()
        .map(|pdu| pdu.auth_events())
        .unwrap_or_default()
    {
        if let Ok(aev) = StateResolution::get_or_load_event(room_id, &aid, event_map) {
            if is_type_and_key(&aev, EventType::RoomPowerLevels, "") {
                pl = Some(aev);
                break;
            }
        }
    }

    // no power-levels event among the auth events -> default level 0
    if pl.is_none() {
        return 0;
    }

    if let Some(content) = pl
        .map(|pl| {
            serde_json::from_value::<ruma::events::room::power_levels::PowerLevelsEventContent>(
                pl.content(),
            )
            .ok()
        })
        .flatten()
    {
        if let Ok(ev) = event {
            if let Some(user) = content.users.get(&ev.sender()) {
                tracing::debug!("found {} at power_level {}", ev.sender().as_str(), user);
                return (*user).into();
            }
        }
        // sender not listed explicitly -> room default
        content.users_default.into()
    } else {
        // malformed power-levels content -> default level 0
        0
    }
}
/// Check the that each event is authenticated based on the events before it.
///
/// ## Returns
///
/// The `unconflicted_state` combined with the newly auth'ed events. So any event that
/// fails the `event_auth::auth_check` will be excluded from the returned `StateMap<EventId>`.
///
/// For each `events_to_check` event we gather the events needed to auth it from the
/// `event_map` or `store` and verify each event using the `event_auth::auth_check`
/// function.
pub fn iterative_auth_check<E: Event>(
    room_id: &RoomId,
    room_version: &RoomVersionId,
    events_to_check: &[EventId],
    unconflicted_state: &StateMap<EventId>,
    event_map: &mut EventMap<Arc<E>>,
) -> Result<StateMap<EventId>> {
    tracing::info!("starting iterative auth check");
    tracing::debug!(
        "performing auth checks on {:?}",
        events_to_check
            .iter()
            .map(ToString::to_string)
            .collect::<Vec<_>>()
    );

    // Start from the unconflicted state; events that pass auth are layered on top.
    let mut resolved_state = unconflicted_state.clone();

    for (idx, event_id) in events_to_check.iter().enumerate() {
        // A missing event here is unrecoverable: propagate the error to the caller.
        let event = StateResolution::get_or_load_event(room_id, event_id, event_map)?;

        // Gather the event's declared auth events, keyed by (type, state_key).
        let mut auth_events = BTreeMap::new();
        for aid in &event.auth_events() {
            if let Ok(ev) = StateResolution::get_or_load_event(room_id, &aid, event_map) {
                // TODO what to do when no state_key is found ??
                // TODO synapse check "rejected_reason", I'm guessing this is redacted_because in ruma ??
                auth_events.insert((ev.kind(), ev.state_key()), ev);
            } else {
                tracing::warn!("auth event id for {} is missing {}", aid, event_id);
            }
        }

        // Overlay the auth-relevant keys with whatever the partially-resolved
        // state currently maps them to.
        for key in event_auth::auth_types_for_event(
            &event.kind(),
            &event.sender(),
            event.state_key(),
            event.content(),
        ) {
            if let Some(ev_id) = resolved_state.get(&key) {
                if let Ok(event) = StateResolution::get_or_load_event(room_id, ev_id, event_map)
                {
                    // TODO synapse checks `rejected_reason` is None here
                    auth_events.insert(key.clone(), event);
                }
            }
        }

        tracing::debug!("event to check {:?}", event.event_id().as_str());

        // `next_back` takes the last loadable entry of `prev_events`.
        let most_recent_prev_event = event
            .prev_events()
            .iter()
            .filter_map(|id| StateResolution::get_or_load_event(room_id, id, event_map).ok())
            .next_back();

        // The key for this is (eventType + a state_key of the signed token not sender) so search
        // for it
        let current_third_party = auth_events.iter().find_map(|(_, pdu)| {
            if pdu.kind() == EventType::RoomThirdPartyInvite {
                Some(pdu.clone()) // TODO no clone, auth_events is borrowed while moved
            } else {
                None
            }
        });

        if event_auth::auth_check(
            room_version,
            &event,
            most_recent_prev_event,
            &auth_events,
            current_third_party,
        )? {
            // add event to resolved state map
            resolved_state.insert((event.kind(), event.state_key()), event_id.clone());
        } else {
            // synapse passes here on AuthError. We do not add this event to resolved_state.
            tracing::warn!(
                "event {} failed the authentication check",
                event_id.to_string()
            );
        }

        // We yield occasionally when we're working with large data sets to
        // ensure that we don't block the reactor loop for too long.
        if idx % _YIELD_AFTER_ITERATIONS == 0 {
            // yield clock.sleep(0)
        }
    }

    Ok(resolved_state)
}
/// Returns the sorted `to_sort` list of `EventId`s based on a mainline sort using
/// the depth of `resolved_power_level`, the server timestamp, and the eventId.
///
/// The depth of the given event is calculated based on the depth of it's closest "parent"
/// power_level event. If there have been two power events the after the most recent are
/// depth 0, the events before (with the first power level as a parent) will be marked
/// as depth 1. depth 1 is "older" than depth 0.
pub fn mainline_sort<E: Event>(
    room_id: &RoomId,
    to_sort: &[EventId],
    resolved_power_level: Option<&EventId>,
    event_map: &mut EventMap<Arc<E>>,
) -> Vec<EventId> {
    tracing::debug!("mainline sort of events");

    // There are no EventId's to sort, bail.
    if to_sort.is_empty() {
        return vec![];
    }

    // Build the "mainline": the chain of power-level events reached by repeatedly
    // following each event's power-level auth event.
    let mut mainline = vec![];
    let mut pl = resolved_power_level.cloned();
    let mut idx = 0;
    while let Some(p) = pl {
        mainline.push(p.clone());
        // NOTE(review): `unwrap` assumes every mainline event is present in
        // `event_map` -- confirm callers guarantee this.
        let event = StateResolution::get_or_load_event(room_id, &p, event_map).unwrap();
        let auth_events = &event.auth_events();
        pl = None;
        for aid in auth_events {
            let ev = StateResolution::get_or_load_event(room_id, &aid, event_map).unwrap();
            if is_type_and_key(&ev, EventType::RoomPowerLevels, "") {
                pl = Some(aid.clone());
                break;
            }
        }
        // We yield occasionally when we're working with large data sets to
        // ensure that we don't block the reactor loop for too long.
        if idx != 0 && idx % _YIELD_AFTER_ITERATIONS == 0 {
            // yield clock.sleep(0)
        }
        idx += 1;
    }

    // Reverse so the oldest mainline event gets depth 0.
    let mainline_map = mainline
        .iter()
        .rev()
        .enumerate()
        .map(|(idx, eid)| ((*eid).clone(), idx))
        .collect::<BTreeMap<_, _>>();

    // Compute each event's sort key: (mainline depth, origin_server_ts, event id).
    let mut order_map = BTreeMap::new();
    for (idx, ev_id) in to_sort.iter().enumerate() {
        if let Ok(event) = StateResolution::get_or_load_event(room_id, ev_id, event_map) {
            if let Ok(depth) = StateResolution::get_mainline_depth(
                room_id,
                Some(event),
                &mainline_map,
                event_map,
            ) {
                order_map.insert(
                    ev_id,
                    (
                        depth,
                        event_map.get(ev_id).map(|ev| ev.origin_server_ts()),
                        ev_id, // TODO should this be a &str to sort lexically??
                    ),
                );
            }
        }
        // We yield occasionally when we're working with large data sets to
        // ensure that we don't block the reactor loop for too long.
        if idx % _YIELD_AFTER_ITERATIONS == 0 {
            // yield clock.sleep(0)
        }
    }

    // sort the event_ids by their depth, timestamp and EventId
    // unwrap is OK order map and sort_event_ids are from to_sort (the same Vec)
    let mut sort_event_ids = order_map.keys().map(|&k| k.clone()).collect::<Vec<_>>();
    sort_event_ids.sort_by_key(|sort_id| order_map.get(sort_id).unwrap());

    sort_event_ids
}
/// Get the mainline depth from the `mainline_map` or finds a power_level event
/// that has an associated mainline depth.
fn get_mainline_depth<E: Event>(
    room_id: &RoomId,
    mut event: Option<Arc<E>>,
    mainline_map: &EventMap<usize>,
    event_map: &mut EventMap<Arc<E>>,
) -> Result<usize> {
    // Walk up the chain of power-level auth events until one is on the mainline.
    while let Some(sort_ev) = event {
        tracing::debug!("mainline event_id {}", sort_ev.event_id().to_string());
        let id = &sort_ev.event_id();
        if let Some(depth) = mainline_map.get(&id) {
            return Ok(*depth);
        }

        let auth_events = &sort_ev.auth_events();
        event = None;
        // Continue the walk from this event's power-level auth event, if any.
        for aid in auth_events {
            let aev = StateResolution::get_or_load_event(room_id, &aid, event_map)?;
            if is_type_and_key(&aev, EventType::RoomPowerLevels, "") {
                event = Some(aev);
                break;
            }
        }
    }
    // Did not find a power level event so we default to zero
    Ok(0)
}
/// Adds `event_id` and every event of its auth chain that appears in `auth_diff`
/// to `graph`, recording for each node the auth events it points at.
fn add_event_and_auth_chain_to_graph<E: Event>(
    room_id: &RoomId,
    graph: &mut BTreeMap<EventId, Vec<EventId>>,
    event_id: &EventId,
    event_map: &mut EventMap<Arc<E>>,
    auth_diff: &[EventId],
) {
    // Iterative DFS over the auth chain; `state` is the work stack.
    let mut state = vec![event_id.clone()];

    while !state.is_empty() {
        // we just checked if it was empty so unwrap is fine
        let eid = state.pop().unwrap();
        graph.entry(eid.clone()).or_insert_with(Vec::new);
        // prefer the store to event as the store filters dedups the events
        // otherwise it seems we can loop forever
        for aid in &StateResolution::get_or_load_event(room_id, &eid, event_map)
            .unwrap()
            .auth_events()
        {
            // Only auth events inside the conflicted set become graph edges.
            if auth_diff.contains(&aid) {
                if !graph.contains_key(&aid) {
                    state.push(aid.clone());
                }
                // we just inserted this at the start of the while loop
                graph.get_mut(&eid).unwrap().push(aid.clone());
            }
        }
    }
}
/// Uses the `event_map` to return the full PDU or fails.
fn get_or_load_event<E: Event>(
_room_id: &RoomId,
ev_id: &EventId,
event_map: &EventMap<Arc<E>>,
) -> Result<Arc<E>> {
event_map.get(ev_id).map_or_else(
|| Err(Error::NotFound(format!("EventId: {:?} not found", ev_id))),
|e| Ok(Arc::clone(e)),
)
}
}
/// Returns `true` when the cached event for `event_id` is a power event;
/// unknown ids are treated as non-power events.
pub fn is_power_event_id<E: Event>(event_id: &EventId, event_map: &EventMap<Arc<E>>) -> bool {
    event_map
        .get(event_id)
        .map_or(false, |state| is_power_event(state))
}
/// Returns `true` when `ev` has kind `ev_type` and exactly the given `state_key`.
pub fn is_type_and_key<E: Event>(ev: &Arc<E>, ev_type: EventType, state_key: &str) -> bool {
    if ev.kind() != ev_type {
        return false;
    }
    matches!(ev.state_key().as_deref(), Some(key) if key == state_key)
}
/// Returns `true` for Matrix "power events": power-levels/join-rules/create
/// events with an empty state key, or leave/ban membership events where the
/// sender is not the affected user (i.e. kicks and bans).
pub fn is_power_event<E: Event>(event: &Arc<E>) -> bool {
    use ruma::events::room::member::{MemberEventContent, MembershipState};

    match event.kind() {
        EventType::RoomPowerLevels | EventType::RoomJoinRules | EventType::RoomCreate => {
            event.state_key() == Some("".into())
        }
        EventType::RoomMember => {
            if let Ok(content) =
                // TODO fix clone
                serde_json::from_value::<MemberEventContent>(event.content())
            {
                if [MembershipState::Leave, MembershipState::Ban].contains(&content.membership) {
                    // Leave/ban applied to someone other than the sender is a kick/ban.
                    return event.sender().as_str()
                        // TODO is None here a failure
                        != event.state_key().as_deref().unwrap_or("NOT A STATE KEY");
                }
            }

            false
        }
        _ => false,
    }
}
// Update resolve methods docs for event_map
use std::{
cmp::Reverse,
collections::{BTreeMap, BTreeSet, BinaryHeap},
sync::Arc,
time::SystemTime,
};
use maplit::btreeset;
use ruma::{
events::EventType,
identifiers::{EventId, RoomId, RoomVersionId},
};
mod error;
pub mod event_auth;
pub mod room_version;
mod state_event;
mod state_store;
pub use error::{Error, Result};
pub use event_auth::{auth_check, auth_types_for_event};
pub use state_event::Event;
pub use state_store::StateStore;
// We want to yield to the reactor occasionally during state res when dealing
// with large data sets, so that we don't exhaust the reactor. This is done by
// yielding to reactor during loops every N iterations.
const _YIELD_AFTER_ITERATIONS: usize = 100;

/// A mapping of event type and state_key to some value `T`, usually an `EventId`.
pub type StateMap<T> = BTreeMap<(EventType, Option<String>), T>;

/// A mapping of `EventId` to `T`, usually a `ServerPdu`.
pub type EventMap<T> = BTreeMap<EventId, T>;

/// Zero-sized namespace type: all state-resolution entry points are associated
/// functions on `StateResolution`; no instance state is kept.
#[derive(Default)]
pub struct StateResolution;
impl StateResolution {
/// Check if the `incoming_event` can be included in the given `current_state`.
///
/// This will authenticate the event against the current state of the room. It
/// is important that the `current_state` argument is accurate and complete.
pub fn apply_event<E: Event>(
    room_id: &RoomId,
    room_version: &RoomVersionId,
    incoming_event: Arc<E>,
    current_state: &StateMap<EventId>,
    event_map: &EventMap<Arc<E>>,
) -> Result<bool> {
    tracing::info!("Applying a single event, state resolution starting");
    let ev = incoming_event;

    // Only the first prev_event (when cached) is considered. NOTE(review):
    // assumes a single relevant prev event -- confirm with callers.
    let prev_event = if let Some(id) = ev.prev_events().first() {
        event_map.get(id).map(Arc::clone)
    } else {
        None
    };

    // Collect the current-state entries this event's auth rules depend on.
    let mut auth_events = StateMap::new();
    for key in
        event_auth::auth_types_for_event(&ev.kind(), &ev.sender(), ev.state_key(), ev.content())
    {
        if let Some(ev_id) = current_state.get(&key) {
            if let Ok(event) = StateResolution::get_or_load_event(room_id, ev_id, event_map) {
                // TODO synapse checks `rejected_reason` is None here
                auth_events.insert(key.clone(), event);
            }
        }
    }

    event_auth::auth_check(room_version, &ev, prev_event, &auth_events, None)
}
/// Resolve sets of state events as they come in. Internally `StateResolution` builds a graph
/// and an auth chain to allow for state conflict resolution.
///
/// ## Arguments
///
/// * `state_sets` - The incoming state to resolve. Each `StateMap` represents a possible fork
/// in the state of a room.
///
/// * `auth_events` - The full recursive set of `auth_events` for each event in the `state_sets`.
///
/// * `event_map` - The `EventMap` acts as a local cache of state, any event that is not found
/// in the `event_map` will cause an unrecoverable `Error` in `resolve`.
pub fn resolve<E: Event>(
    room_id: &RoomId,
    room_version: &RoomVersionId,
    state_sets: &[StateMap<EventId>],
    auth_events: Vec<Vec<EventId>>,
    event_map: &mut EventMap<Arc<E>>,
) -> Result<StateMap<EventId>> {
    tracing::info!("State resolution starting");

    // split non-conflicting and conflicting state
    let (clean, conflicting) = StateResolution::separate(&state_sets);

    tracing::info!("non conflicting {:?}", clean.len());

    if conflicting.is_empty() {
        tracing::info!("no conflicting state found");
        return Ok(clean);
    }

    tracing::info!("{} conflicting events", conflicting.len());

    // the set of auth events that are not common across server forks
    let mut auth_diff = StateResolution::get_auth_chain_diff(room_id, &auth_events)?;

    tracing::debug!("auth diff size {:?}", auth_diff);

    // add the auth_diff to conflicting now we have a full set of conflicting events
    auth_diff.extend(conflicting.values().cloned().flatten());
    // Dedup by round-tripping through a BTreeSet.
    let mut all_conflicted = auth_diff
        .into_iter()
        .collect::<BTreeSet<_>>()
        .into_iter()
        .collect::<Vec<_>>();

    tracing::info!("full conflicted set is {} events", all_conflicted.len());

    // we used to check that all events are events from the correct room
    // this is now a check the caller of `resolve` must make.

    // synapse says `full_set = {eid for eid in full_conflicted_set if eid in event_map}`
    //
    // don't honor events we cannot "verify"
    all_conflicted.retain(|id| event_map.contains_key(id));

    // get only the control events with a state_key: "" or ban/kick event (sender != state_key)
    let control_events = all_conflicted
        .iter()
        .filter(|id| is_power_event_id(id, &event_map))
        .cloned()
        .collect::<Vec<_>>();

    // sort the control events based on power_level/clock/event_id and outgoing/incoming edges
    let mut sorted_control_levels = StateResolution::reverse_topological_power_sort(
        room_id,
        &control_events,
        event_map,
        &all_conflicted,
    );

    tracing::debug!("SRTD {:?}", sorted_control_levels);

    // sequentially auth check each control event.
    let resolved_control = StateResolution::iterative_auth_check(
        room_id,
        room_version,
        &sorted_control_levels,
        &clean,
        event_map,
    )?;

    tracing::debug!(
        "AUTHED {:?}",
        resolved_control
            .iter()
            .map(|(key, id)| (key, id.to_string()))
            .collect::<Vec<_>>()
    );

    // At this point the control_events have been resolved we now have to
    // sort the remaining events using the mainline of the resolved power level.
    sorted_control_levels.dedup();
    let deduped_power_ev = sorted_control_levels;

    // This removes the control events that passed auth and more importantly those that failed auth
    let events_to_resolve = all_conflicted
        .iter()
        .filter(|id| !deduped_power_ev.contains(id))
        .cloned()
        .collect::<Vec<_>>();

    tracing::debug!(
        "LEFT {:?}",
        events_to_resolve
            .iter()
            .map(ToString::to_string)
            .collect::<Vec<_>>()
    );

    // This "epochs" power level event
    let power_event = resolved_control.get(&(EventType::RoomPowerLevels, Some("".into())));

    tracing::debug!("PL {:?}", power_event);

    let sorted_left_events =
        StateResolution::mainline_sort(room_id, &events_to_resolve, power_event, event_map);

    tracing::debug!(
        "SORTED LEFT {:?}",
        sorted_left_events
            .iter()
            .map(ToString::to_string)
            .collect::<Vec<_>>()
    );

    let mut resolved_state = StateResolution::iterative_auth_check(
        room_id,
        room_version,
        &sorted_left_events,
        &resolved_control, // The control events are added to the final resolved state
        event_map,
    )?;

    // add unconflicted state to the resolved state
    // We priorities the unconflicting state
    resolved_state.extend(clean);

    Ok(resolved_state)
}
/// Split the events that have no conflicts from those that are conflicting.
/// The return tuple looks like `(unconflicted, conflicted)`.
///
/// State is determined to be conflicting if for the given key (EventType, StateKey) there
/// is not exactly one eventId. This includes missing events, if one state_set includes an event
/// that none of the other have this is a conflicting event.
pub fn separate(
    state_sets: &[StateMap<EventId>],
) -> (StateMap<EventId>, StateMap<Vec<EventId>>) {
    use itertools::Itertools;
    tracing::info!(
        "seperating {} sets of events into conflicted/unconflicted",
        state_sets.len()
    );

    let mut unconflicted_state = StateMap::new();
    let mut conflicted_state = StateMap::new();

    for key in state_sets.iter().flat_map(|map| map.keys()).dedup() {
        // Deduped list of what each fork maps `key` to; `None` marks a fork
        // that does not contain `key` at all.
        let mut event_ids = state_sets
            .iter()
            .map(|state_set| state_set.get(key))
            .dedup()
            .collect::<Vec<_>>();

        if event_ids.len() == 1 {
            if let Some(Some(id)) = event_ids.pop() {
                unconflicted_state.insert(key.clone(), id.clone());
            } else {
                // `key` came from iterating the state sets, so at least one set
                // maps it to `Some(id)`; a lone deduped `None` is impossible.
                // `unreachable!` (was a bare `panic!()`) documents that invariant.
                unreachable!("a single deduped state entry must be `Some`")
            }
        } else {
            conflicted_state.insert(
                key.clone(),
                event_ids.into_iter().flatten().cloned().collect::<Vec<_>>(),
            );
        }
    }

    (unconflicted_state, conflicted_state)
}
/// Returns a Vec of deduped EventIds that appear in some chains but not others.
///
/// The result is the symmetric-difference-style set: every id from every chain
/// minus the ids common to all chains, returned sorted and deduplicated.
pub fn get_auth_chain_diff(
    _room_id: &RoomId,
    auth_event_ids: &[Vec<EventId>],
) -> Result<Vec<EventId>> {
    let mut chains = vec![];
    for ids in auth_event_ids {
        // TODO state store `auth_event_ids` returns self in the event ids list
        // when an event returns `auth_event_ids` self is not contained
        let chain = ids.iter().cloned().collect::<BTreeSet<_>>();
        chains.push(chain);
    }

    if let Some(chain) = chains.first() {
        let rest = chains.iter().skip(1).flatten().cloned().collect();
        // Keep `common` as a set so the `filter` below does O(log n) lookups
        // instead of a linear `Vec::contains` scan per id.
        let common = chain.intersection(&rest).collect::<BTreeSet<_>>();

        Ok(chains
            .iter()
            .flatten()
            .filter(|id| !common.contains(id))
            .cloned()
            .collect::<BTreeSet<_>>()
            .into_iter()
            .collect())
    } else {
        Ok(vec![])
    }
}
/// Events are sorted from "earliest" to "latest". They are compared using
/// the negative power level (reverse topological ordering), the
/// origin server timestamp and incase of a tie the `EventId`s
/// are compared lexicographically.
///
/// The power level is negative because a higher power level is equated to an
/// earlier (further back in time) origin server timestamp.
pub fn reverse_topological_power_sort<E: Event>(
    room_id: &RoomId,
    events_to_sort: &[EventId],
    event_map: &mut EventMap<Arc<E>>,
    auth_diff: &[EventId],
) -> Vec<EventId> {
    tracing::debug!("reverse topological sort of power events");

    // Build the auth-chain graph restricted to events in `auth_diff`.
    let mut graph = BTreeMap::new();
    for (idx, event_id) in events_to_sort.iter().enumerate() {
        StateResolution::add_event_and_auth_chain_to_graph(
            room_id, &mut graph, event_id, event_map, auth_diff,
        );

        // We yield occasionally when we're working with large data sets to
        // ensure that we don't block the reactor loop for too long.
        if idx % _YIELD_AFTER_ITERATIONS == 0 {
            // yield clock.sleep(0)
        }
    }

    // this is used in the `key_fn` passed to the lexico_topo_sort fn
    let mut event_to_pl = BTreeMap::new();
    for (idx, event_id) in graph.keys().enumerate() {
        let pl = StateResolution::get_power_level_for_sender(room_id, &event_id, event_map);
        tracing::info!("{} power level {}", event_id.to_string(), pl);

        event_to_pl.insert(event_id.clone(), pl);

        // We yield occasionally when we're working with large data sets to
        // ensure that we don't block the reactor loop for too long.
        if idx % _YIELD_AFTER_ITERATIONS == 0 {
            // yield clock.sleep(0)
        }
    }

    StateResolution::lexicographical_topological_sort(&graph, |event_id| {
        let ev = event_map.get(event_id).unwrap();
        let pl = event_to_pl.get(event_id).unwrap();

        tracing::debug!("{:?}", (-*pl, ev.origin_server_ts(), &ev.event_id()));

        // This return value is the key used for sorting events,
        // events are then sorted by power level, time,
        // and lexically by event_id.
        (-*pl, ev.origin_server_ts(), ev.event_id().clone())
    })
}
/// Sorts the event graph based on number of outgoing/incoming edges, where
/// `key_fn` is used as a tie breaker. The tie breaker happens based on
/// power level, age, and event_id.
pub fn lexicographical_topological_sort<F>(
    graph: &BTreeMap<EventId, Vec<EventId>>,
    key_fn: F,
) -> Vec<EventId>
where
    F: Fn(&EventId) -> (i64, SystemTime, EventId),
{
    tracing::info!("starting lexicographical topological sort");
    // NOTE: an event that has no incoming edges happened most recently,
    // and an event that has no outgoing edges happened least recently.

    // NOTE: this is basically Kahn's algorithm except we look at nodes with no
    // outgoing edges, c.f.
    // https://en.wikipedia.org/wiki/Topological_sorting#Kahn's_algorithm

    // TODO make the BTreeSet conversion cleaner ??
    // outdegree_map is an event referring to the events before it, the
    // more outdegree's the more recent the event.
    let mut outdegree_map: BTreeMap<EventId, BTreeSet<EventId>> = graph
        .iter()
        .map(|(k, v)| (k.clone(), v.iter().cloned().collect()))
        .collect();

    // The number of events that depend on the given event (the eventId key)
    let mut reverse_graph = BTreeMap::new();

    // Vec of nodes that have zero out degree, least recent events.
    let mut zero_outdegree = vec![];

    for (node, edges) in graph.iter() {
        if edges.is_empty() {
            // the `Reverse` is because rusts `BinaryHeap` sorts largest -> smallest we need
            // smallest -> largest
            zero_outdegree.push(Reverse((key_fn(node), node)));
        }

        reverse_graph.entry(node).or_insert(btreeset![]);
        for edge in edges {
            reverse_graph
                .entry(edge)
                .or_insert(btreeset![])
                .insert(node);
        }
    }

    let mut heap = BinaryHeap::from(zero_outdegree);

    // we remove the oldest node (most incoming edges) and check against all other
    let mut sorted = vec![];
    // destructure the `Reverse` and take the smallest `node` each time
    while let Some(Reverse((_, node))) = heap.pop() {
        let node: &EventId = node;
        for parent in reverse_graph.get(node).unwrap() {
            // the number of outgoing edges this node has
            let out = outdegree_map.get_mut(parent).unwrap();

            // only push on the heap once older events have been cleared
            out.remove(node);
            if out.is_empty() {
                heap.push(Reverse((key_fn(parent), parent)));
            }
        }

        // synapse yields we push then return the vec
        sorted.push(node.clone());
    }

    sorted
}
/// Find the power level for the sender of `event_id` or return a default value of zero.
///
/// Walks the event's `auth_events` looking for an `m.room.power_levels` event and,
/// if found, returns the sender's entry in its `users` map (falling back to
/// `users_default`). Returns `0` when the event, a power-levels auth event, or
/// deserializable power-levels content cannot be found.
fn get_power_level_for_sender<E: Event>(
    room_id: &RoomId,
    event_id: &EventId,
    event_map: &mut EventMap<Arc<E>>,
) -> i64 {
    tracing::info!("fetch event ({}) senders power level", event_id.to_string());

    let event = StateResolution::get_or_load_event(room_id, event_id, event_map);
    let mut pl = None;

    // TODO store.auth_event_ids returns "self" with the event ids is this ok
    // event.auth_event_ids does not include its own event id ?
    for aid in event
        .as_ref()
        .map(|pdu| pdu.auth_events())
        .unwrap_or_default()
    {
        if let Ok(aev) = StateResolution::get_or_load_event(room_id, &aid, event_map) {
            if is_type_and_key(&aev, EventType::RoomPowerLevels, "") {
                pl = Some(aev);
                break;
            }
        }
    }

    // No power-levels event in the auth chain: everyone is at the default level 0.
    if pl.is_none() {
        return 0;
    }

    // `and_then` instead of the previous `map(..).flatten()` (clippy::map_flatten).
    if let Some(content) = pl.and_then(|pl| {
        serde_json::from_value::<ruma::events::room::power_levels::PowerLevelsEventContent>(
            pl.content(),
        )
        .ok()
    }) {
        if let Ok(ev) = event {
            if let Some(user) = content.users.get(&ev.sender()) {
                tracing::debug!("found {} at power_level {}", ev.sender().as_str(), user);
                return (*user).into();
            }
        }
        content.users_default.into()
    } else {
        0
    }
}
/// Check the that each event is authenticated based on the events before it.
///
/// ## Returns
///
/// The `unconflicted_state` combined with the newly auth'ed events. So any event that
/// fails the `event_auth::auth_check` will be excluded from the returned `StateMap<EventId>`.
///
/// For each `events_to_check` event we gather the events needed to auth it from the
/// `event_map` or `store` and verify each event using the `event_auth::auth_check`
/// function.
pub fn iterative_auth_check<E: Event>(
    room_id: &RoomId,
    room_version: &RoomVersionId,
    events_to_check: &[EventId],
    unconflicted_state: &StateMap<EventId>,
    event_map: &mut EventMap<Arc<E>>,
) -> Result<StateMap<EventId>> {
    tracing::info!("starting iterative auth check");
    tracing::debug!(
        "performing auth checks on {:?}",
        events_to_check
            .iter()
            .map(ToString::to_string)
            .collect::<Vec<_>>()
    );

    // Start from the unconflicted state; events that pass auth are layered on top.
    let mut resolved_state = unconflicted_state.clone();

    for (idx, event_id) in events_to_check.iter().enumerate() {
        // A missing event here is unrecoverable: propagate the error to the caller.
        let event = StateResolution::get_or_load_event(room_id, event_id, event_map)?;

        // Gather the event's declared auth events, keyed by (type, state_key).
        let mut auth_events = BTreeMap::new();
        for aid in &event.auth_events() {
            if let Ok(ev) = StateResolution::get_or_load_event(room_id, &aid, event_map) {
                // TODO what to do when no state_key is found ??
                // TODO synapse check "rejected_reason", I'm guessing this is redacted_because in ruma ??
                auth_events.insert((ev.kind(), ev.state_key()), ev);
            } else {
                tracing::warn!("auth event id for {} is missing {}", aid, event_id);
            }
        }

        // Overlay the auth-relevant keys with whatever the partially-resolved
        // state currently maps them to.
        for key in event_auth::auth_types_for_event(
            &event.kind(),
            &event.sender(),
            event.state_key(),
            event.content(),
        ) {
            if let Some(ev_id) = resolved_state.get(&key) {
                if let Ok(event) = StateResolution::get_or_load_event(room_id, ev_id, event_map)
                {
                    // TODO synapse checks `rejected_reason` is None here
                    auth_events.insert(key.clone(), event);
                }
            }
        }

        tracing::debug!("event to check {:?}", event.event_id().as_str());

        // `next_back` takes the last loadable entry of `prev_events`.
        let most_recent_prev_event = event
            .prev_events()
            .iter()
            .filter_map(|id| StateResolution::get_or_load_event(room_id, id, event_map).ok())
            .next_back();

        // The key for this is (eventType + a state_key of the signed token not sender) so search
        // for it
        let current_third_party = auth_events.iter().find_map(|(_, pdu)| {
            if pdu.kind() == EventType::RoomThirdPartyInvite {
                Some(pdu.clone()) // TODO no clone, auth_events is borrowed while moved
            } else {
                None
            }
        });

        if event_auth::auth_check(
            room_version,
            &event,
            most_recent_prev_event,
            &auth_events,
            current_third_party,
        )? {
            // add event to resolved state map
            resolved_state.insert((event.kind(), event.state_key()), event_id.clone());
        } else {
            // synapse passes here on AuthError. We do not add this event to resolved_state.
            tracing::warn!(
                "event {} failed the authentication check",
                event_id.to_string()
            );
        }

        // We yield occasionally when we're working with large data sets to
        // ensure that we don't block the reactor loop for too long.
        if idx % _YIELD_AFTER_ITERATIONS == 0 {
            // yield clock.sleep(0)
        }
    }

    Ok(resolved_state)
}
/// Returns the sorted `to_sort` list of `EventId`s based on a mainline sort using
/// the depth of `resolved_power_level`, the server timestamp, and the eventId.
///
/// The depth of the given event is calculated based on the depth of it's closest "parent"
/// power_level event. If there have been two power events the after the most recent are
/// depth 0, the events before (with the first power level as a parent) will be marked
/// as depth 1. depth 1 is "older" than depth 0.
pub fn mainline_sort<E: Event>(
    room_id: &RoomId,
    to_sort: &[EventId],
    resolved_power_level: Option<&EventId>,
    event_map: &mut EventMap<Arc<E>>,
) -> Vec<EventId> {
    tracing::debug!("mainline sort of events");

    // There are no EventId's to sort, bail.
    if to_sort.is_empty() {
        return vec![];
    }

    // Build the "mainline": the chain of power-level events reached by repeatedly
    // following each event's power-level auth event.
    let mut mainline = vec![];
    let mut pl = resolved_power_level.cloned();
    let mut idx = 0;
    while let Some(p) = pl {
        mainline.push(p.clone());
        // NOTE(review): `unwrap` assumes every mainline event is present in
        // `event_map` -- confirm callers guarantee this.
        let event = StateResolution::get_or_load_event(room_id, &p, event_map).unwrap();
        let auth_events = &event.auth_events();
        pl = None;
        for aid in auth_events {
            let ev = StateResolution::get_or_load_event(room_id, &aid, event_map).unwrap();
            if is_type_and_key(&ev, EventType::RoomPowerLevels, "") {
                pl = Some(aid.clone());
                break;
            }
        }
        // We yield occasionally when we're working with large data sets to
        // ensure that we don't block the reactor loop for too long.
        if idx != 0 && idx % _YIELD_AFTER_ITERATIONS == 0 {
            // yield clock.sleep(0)
        }
        idx += 1;
    }

    // Reverse so the oldest mainline event gets depth 0.
    let mainline_map = mainline
        .iter()
        .rev()
        .enumerate()
        .map(|(idx, eid)| ((*eid).clone(), idx))
        .collect::<BTreeMap<_, _>>();

    // Compute each event's sort key: (mainline depth, origin_server_ts, event id).
    let mut order_map = BTreeMap::new();
    for (idx, ev_id) in to_sort.iter().enumerate() {
        if let Ok(event) = StateResolution::get_or_load_event(room_id, ev_id, event_map) {
            if let Ok(depth) = StateResolution::get_mainline_depth(
                room_id,
                Some(event),
                &mainline_map,
                event_map,
            ) {
                order_map.insert(
                    ev_id,
                    (
                        depth,
                        event_map.get(ev_id).map(|ev| ev.origin_server_ts()),
                        ev_id, // TODO should this be a &str to sort lexically??
                    ),
                );
            }
        }
        // We yield occasionally when we're working with large data sets to
        // ensure that we don't block the reactor loop for too long.
        if idx % _YIELD_AFTER_ITERATIONS == 0 {
            // yield clock.sleep(0)
        }
    }

    // sort the event_ids by their depth, timestamp and EventId
    // unwrap is OK order map and sort_event_ids are from to_sort (the same Vec)
    let mut sort_event_ids = order_map.keys().map(|&k| k.clone()).collect::<Vec<_>>();
    sort_event_ids.sort_by_key(|sort_id| order_map.get(sort_id).unwrap());

    sort_event_ids
}
/// Get the mainline depth from the `mainline_map` or finds a power_level event
/// that has an associated mainline depth.
fn get_mainline_depth<E: Event>(
    room_id: &RoomId,
    mut event: Option<Arc<E>>,
    mainline_map: &EventMap<usize>,
    event_map: &mut EventMap<Arc<E>>,
) -> Result<usize> {
    // Walk up the chain of power-level auth events until one is on the mainline.
    while let Some(sort_ev) = event {
        tracing::debug!("mainline event_id {}", sort_ev.event_id().to_string());
        let id = &sort_ev.event_id();
        if let Some(depth) = mainline_map.get(&id) {
            return Ok(*depth);
        }

        let auth_events = &sort_ev.auth_events();
        event = None;
        // Continue the walk from this event's power-level auth event, if any.
        for aid in auth_events {
            let aev = StateResolution::get_or_load_event(room_id, &aid, event_map)?;
            if is_type_and_key(&aev, EventType::RoomPowerLevels, "") {
                event = Some(aev);
                break;
            }
        }
    }
    // Did not find a power level event so we default to zero
    Ok(0)
}
/// Adds `event_id` and every event of its auth chain that appears in `auth_diff`
/// to `graph`, recording for each node the auth events it points at.
fn add_event_and_auth_chain_to_graph<E: Event>(
    room_id: &RoomId,
    graph: &mut BTreeMap<EventId, Vec<EventId>>,
    event_id: &EventId,
    event_map: &mut EventMap<Arc<E>>,
    auth_diff: &[EventId],
) {
    // Iterative DFS over the auth chain; `state` is the work stack.
    let mut state = vec![event_id.clone()];

    while !state.is_empty() {
        // we just checked if it was empty so unwrap is fine
        let eid = state.pop().unwrap();
        graph.entry(eid.clone()).or_insert_with(Vec::new);
        // prefer the store to event as the store filters dedups the events
        // otherwise it seems we can loop forever
        for aid in &StateResolution::get_or_load_event(room_id, &eid, event_map)
            .unwrap()
            .auth_events()
        {
            // Only auth events inside the conflicted set become graph edges.
            if auth_diff.contains(&aid) {
                if !graph.contains_key(&aid) {
                    state.push(aid.clone());
                }
                // we just inserted this at the start of the while loop
                graph.get_mut(&eid).unwrap().push(aid.clone());
            }
        }
    }
}
/// Uses the `event_map` to return the full PDU or fails.
fn get_or_load_event<E: Event>(
_room_id: &RoomId,
ev_id: &EventId,
event_map: &EventMap<Arc<E>>,
) -> Result<Arc<E>> {
event_map.get(ev_id).map_or_else(
|| Err(Error::NotFound(format!("EventId: {:?} not found", ev_id))),
|e| Ok(Arc::clone(e)),
)
}
}
/// Returns `true` when the cached event for `event_id` is a power event;
/// unknown ids are treated as non-power events.
pub fn is_power_event_id<E: Event>(event_id: &EventId, event_map: &EventMap<Arc<E>>) -> bool {
    event_map
        .get(event_id)
        .map_or(false, |state| is_power_event(state))
}
/// Returns `true` when `ev` has kind `ev_type` and exactly the given `state_key`.
pub fn is_type_and_key<E: Event>(ev: &Arc<E>, ev_type: EventType, state_key: &str) -> bool {
    if ev.kind() != ev_type {
        return false;
    }
    matches!(ev.state_key().as_deref(), Some(key) if key == state_key)
}
/// Returns `true` for Matrix "power events": power-levels/join-rules/create
/// events with an empty state key, or leave/ban membership events where the
/// sender is not the affected user (i.e. kicks and bans).
pub fn is_power_event<E: Event>(event: &Arc<E>) -> bool {
    use ruma::events::room::member::{MemberEventContent, MembershipState};

    match event.kind() {
        EventType::RoomPowerLevels | EventType::RoomJoinRules | EventType::RoomCreate => {
            event.state_key() == Some("".into())
        }
        EventType::RoomMember => {
            if let Ok(content) =
                // TODO fix clone
                serde_json::from_value::<MemberEventContent>(event.content())
            {
                if [MembershipState::Leave, MembershipState::Ban].contains(&content.membership) {
                    // Leave/ban applied to someone other than the sender is a kick/ban.
                    return event.sender().as_str()
                        // TODO is None here a failure
                        != event.state_key().as_deref().unwrap_or("NOT A STATE KEY");
                }
            }

            false
        }
        _ => false,
    }
}
// ----- unrelated DOM-builder source follows -----
#![feature(conservative_impl_trait)]
/// A node in a DOM tree: an element (with attributes and children) or text.
pub trait DOMNode: Sized {
    /// Returns the attribute at `_index`, if any; the base implementation has none.
    fn get_attribute(&self, _index: usize) -> Option<&KeyValue> { None }

    /// Iterator over this node's attributes, in index order.
    fn attributes<'a>(&'a self) -> AttributeIter<'a, Self> {
        AttributeIter { node: self, index: 0 }
    }

    /// Wraps `self`, prepending `attrs` to its attribute list.
    fn with_attributes<A: AsRef<[KeyValue]>>(self, attrs: A) -> WithAttributes<Self, A> {
        WithAttributes { node: self, attributes: attrs }
    }

    /// Feeds each child node to the processor `P`, accumulating into `acc`.
    fn process_children<P: DOMNodeProcessor>(&self, acc: &mut P::Acc) -> () where Self: Sized;

    /// The element tag or text contents of this node.
    fn value<'a>(&'a self) -> DOMValue<'a>;
}
type KeyValue = (&'static str, &'static str);
pub enum DOMValue<'a> {
Element { tag: &'a str },
Text(&'a str),
}
pub struct WithAttributes<T: DOMNode, A: AsRef<[KeyValue]>> {
node: T,
attributes: A,
}
impl<T, A> DOMNode for WithAttributes<T, A> where T: DOMNode, A: AsRef<[KeyValue]> {
fn get_attribute(&self, index: usize) -> Option<&KeyValue> {
let attributes = self.attributes.as_ref();
attributes
.get(index)
.or_else(|| self.node.get_attribute(index - attributes.len()))
}
fn process_children<P: DOMNodeProcessor>(&self, _acc: &mut P::Acc) -> () {}
fn value<'a>(&'a self) -> DOMValue<'a> { self.node.value() }
}
pub struct AttributeIter<'a, T: DOMNode + 'a> {
node: &'a T,
index: usize,
}
impl<'a, T: DOMNode> Iterator for AttributeIter<'a, T> {
type Item = &'a (&'static str, &'static str);
fn next(&mut self) -> Option<Self::Item> {
let res = self.node.get_attribute(self.index);
self.index += 1;
res
}
}
impl<'a, T: DOMNode> DOMNode for &'a T {
fn process_children<P: DOMNodeProcessor>(&self, acc: &mut P::Acc) -> () {
(*self).process_children::<P>(acc);
}
fn value<'b>(&'b self) -> DOMValue<'b> { (*self).value() }
}
impl DOMNode for String {
fn process_children<P: DOMNodeProcessor>(&self, _acc: &mut P::Acc) -> () {}
fn value<'a>(&'a self) -> DOMValue<'a> { DOMValue::Text(self) }
}
impl DOMNode for &'static str {
fn process_children<P: DOMNodeProcessor>(&self, _acc: &mut P::Acc) -> () {}
fn value<'a>(&'a self) -> DOMValue<'a> { DOMValue::Text(self) }
}
/// Builder functions for HTML tags (`div(...)`, `span(...)`, ...).
pub mod tags {
    use super::{DOMNode, DOMNodeProcessor, DOMNodes, DOMValue, KeyValue};

    /// Anything convertible into the `(children, attributes)` pair a tag
    /// constructor needs.
    pub trait TagProperties {
        type Children: DOMNodes;
        type Attributes: AsRef<[KeyValue]>;
        fn properties(self) -> (Self::Children, Self::Attributes);
    }

    /// Bare children: no attributes.
    impl<C: DOMNodes> TagProperties for C {
        type Children = Self;
        type Attributes = [KeyValue; 0];
        fn properties(self) -> (Self::Children, Self::Attributes) {
            (self, [])
        }
    }

    /// Marker wrapping an attribute list for the `(Attrs([...]), children)`
    /// tag syntax. The field is `pub` — without it the `TagProperties` impl
    /// below is unusable from outside this module, since callers could never
    /// construct an `Attrs` value.
    pub struct Attrs<A: AsRef<[KeyValue]>>(pub A);

    /// `(Attrs([...]), children)` alternative tag syntax.
    impl<C: DOMNodes, A: AsRef<[KeyValue]>> TagProperties for (Attrs<A>, C) {
        type Children = C;
        type Attributes = A;
        fn properties(self) -> (Self::Children, Self::Attributes) {
            (
                self.1,
                (self.0).0,
            )
        }
    }

    /// An element node: tag name plus children and attributes.
    pub struct Tag<C: DOMNodes, A: AsRef<[KeyValue]>> {
        tagname: &'static str,
        contents: C,
        attributes: A,
    }
    impl<C: DOMNodes, A: AsRef<[KeyValue]>> DOMNode for Tag<C, A> {
        fn get_attribute(&self, index: usize) -> Option<&KeyValue> {
            self.attributes.as_ref().get(index)
        }
        fn process_children<P: DOMNodeProcessor>(&self, acc: &mut P::Acc) -> () {
            self.contents.process_all::<P>(acc);
        }
        fn value<'a>(&'a self) -> DOMValue<'a> {
            DOMValue::Element {
                tag: self.tagname,
            }
        }
    }

    /// Generates one builder function per HTML tag; the function name is used
    /// verbatim (via `stringify!`) as the emitted tag name.
    macro_rules! impl_tags {
        ($($tagname:ident),*) => { $(
            pub fn $tagname<T: TagProperties>(properties: T) -> Tag<T::Children, T::Attributes> {
                let (contents, attributes) = properties.properties();
                Tag {
                    tagname: stringify!($tagname),
                    contents: contents,
                    attributes: attributes,
                }
            }
        )* }
    }
    // `framset` is a long-standing typo kept for backward compatibility;
    // prefer the correctly spelled `frameset`.
    impl_tags!(
        a, abbr, acronym, address, applet, area, article, aside, audio, b, base, basefont, bdi,
        bdo, big, blockquote, body, br, button, canvas, caption, center, cite, code, col, colgroup,
        datalist, dd, del, details, dfn, dialog, dir, div, dl, dt, em, embed, fieldset,
        figcaption, figure, font, footer, form, frame, frameset, framset, h1, h2, h3, h4, h5, h6,
        head, header, hr, i, iframe, img, input, ins, kbd, keygen, label, legend, li, link, main,
        map, mark, menu, menuitem, meta, meter, nav, noframes, noscript, object, ol, optgroup,
        option, output, p, param, pre, progress, q, rp, rt, ruby, s, samp, script, section,
        select, small, source, span, strike, strong, style, sub, summary, sup, table, tbody, td,
        textarea, tfoot, th, thead, time, title, tr, track, tt, u, ul, var, video, wbr
    );
}
/// Processor of a `DOMNode`
pub trait DOMNodeProcessor {
    /// Accumulator
    type Acc;
    /// Folding function
    fn get_processor<T: DOMNode>() -> fn(&mut Self::Acc, &T) -> ();
}
/// A collection of zero or more `DOMNode`s that can be processed in order.
pub trait DOMNodes {
    fn process_all<P: DOMNodeProcessor>(&self, acc: &mut P::Acc) -> ();
}
// The empty collection: processing is a no-op.
impl DOMNodes for () {
    fn process_all<P: DOMNodeProcessor>(&self, _acc: &mut P::Acc) -> () {}
}
// A single node is a one-element collection: feed it to the processor.
impl<T: DOMNode> DOMNodes for T {
    fn process_all<P: DOMNodeProcessor>(&self, acc: &mut P::Acc) -> () {
        P::get_processor()(acc, self);
    }
}
impl<T: DOMNodes> DOMNodes for [T] {
    fn process_all<P: DOMNodeProcessor>(&self, acc: &mut P::Acc) -> () {
        for x in self {
            x.process_all::<P>(acc);
        }
    }
}
impl<T: DOMNodes> DOMNodes for Vec<T> {
    fn process_all<P: DOMNodeProcessor>(&self, acc: &mut P::Acc) -> () {
        for x in self {
            x.process_all::<P>(acc);
        }
    }
}
// Fixed-size arrays need one impl per length (pre-const-generics Rust).
macro_rules! array_impls {
    ($($len:expr,)*) => { $(
        impl<T: DOMNodes> DOMNodes for [T; $len] {
            fn process_all<P: DOMNodeProcessor>(&self, acc: &mut P::Acc) -> () {
                for x in self {
                    x.process_all::<P>(acc);
                }
            }
        }
    )* }
}
array_impls!(
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
    17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
);
// Credit to @shepmaster for structure of recursive tuple macro
macro_rules! tuple_impls {
() => {};
// Copywrite @shepmaster
(($idx:tt => $typ:ident), $( ($nidx:tt => $ntyp:ident), )*) => {
/*
* Invoke recursive reversal of list that ends in the macro expansion implementation
* of the reversed list
*/
tuple_impls!([($idx, $typ);] $( ($nidx => $ntyp), )*);
tuple_impls!($( ($nidx => $ntyp), )*); // invoke macro on tail
};
/*
* ([accumulatedList], listToReverse); recursively calls tuple_impls until the list to reverse
+ is empty (see next pattern)
*/
([$(($accIdx: tt, $accTyp: ident);)+] ($idx:tt => $typ:ident), $( ($nidx:tt => $ntyp:ident), )*) => {
tuple_impls!([($idx, $typ); $(($accIdx, $accTyp); )*] $( ($nidx => $ntyp), ) *);
};
// Finally expand into the implementation
([($idx:tt, $typ:ident); $( ($nidx:tt, $ntyp:ident); )*]) => {
impl<$typ, $( $ntyp ),*> DOMNodes for ($typ, $( $ntyp ),*)
where $typ: DOMNodes,
$( $ntyp: DOMNodes),*
{
fn process_all<P>(&self, acc: &mut P::Acc) -> ()
where P: DOMNodeProcessor {
&self.$idx.process_all::<P>(acc);
$(
&self.$nidx.process_all::<P>(acc);
)*
}
}
}
}
tuple_impls!(
(9 => J),
(8 => I),
(7 => H),
(6 => G),
(5 => F),
(4 => E),
(3 => D),
(2 => C),
(1 => B),
(0 => A),
);
/// Renders a `DOMNode` tree to an HTML string.
pub mod html_string {
    use super::{DOMNode, DOMNodeProcessor, DOMValue};
    /// Processor whose accumulator is the output `String`.
    pub struct HtmlStringBuilder;
    impl DOMNodeProcessor for HtmlStringBuilder {
        type Acc = String;
        fn get_processor<T: DOMNode>() -> fn(&mut Self::Acc, &T) -> () {
            // Appends one node — and, by recursing through `process_children`,
            // its whole subtree — to `string`.
            fn add_node<T: DOMNode>(string: &mut String, node: &T) {
                match node.value() {
                    DOMValue::Element { tag: tagname } => {
                        // Opening tag with attributes: <tag k="v" ...>
                        string.push_str("<");
                        string.push_str(tagname);
                        for attr in node.attributes() {
                            string.push_str(" ");
                            string.push_str(attr.0);
                            string.push_str("=\"");
                            string.push_str(attr.1);
                            string.push_str("\"")
                        }
                        string.push_str(">");
                        node.process_children::<HtmlStringBuilder>(string);
                        // Closing tag: </tag>
                        string.push_str("</");
                        string.push_str(tagname);
                        string.push_str(">");
                    }
                    DOMValue::Text(text) => {
                        // TODO: HTML escaping
                        string.push_str(text);
                    }
                }
            }
            add_node
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use super::tags::*;
    use super::html_string::*;
    // Two dummy element types so tests can distinguish tags in output.
    struct BogusOne;
    impl DOMNode for BogusOne {
        fn process_children<P: DOMNodeProcessor>(&self, _acc: &mut P::Acc) -> () {}
        fn value<'a>(&'a self) -> DOMValue<'a> {
            DOMValue::Element { tag: "bogus_tag_one" }
        }
    }
    struct BogusTwo;
    impl DOMNode for BogusTwo {
        fn process_children<P: DOMNodeProcessor>(&self, _acc: &mut P::Acc) -> () {}
        fn value<'a>(&'a self) -> DOMValue<'a> {
            DOMValue::Element { tag: "bogus_tag_two" }
        }
    }
    // Processor that simply counts the nodes it visits.
    struct ChildCounter;
    impl DOMNodeProcessor for ChildCounter {
        type Acc = usize;
        fn get_processor<T: DOMNode>() -> fn(&mut Self::Acc, &T) -> () {
            fn incr<T: DOMNode>(count: &mut usize, _node: &T) {
                *count += 1;
            }
            incr
        }
    }
    // A small fixed tree exercising tags, text, and nesting.
    fn html_sample() -> impl DOMNode {
        div ((
            BogusOne,
            BogusOne,
            BogusTwo,
            table ((
                "something",
                th (()),
                tr (()),
                tr (()),
            )),
        ))
    }
    #[test]
    fn counts_children() {
        let mut count = 0;
        (BogusOne, &BogusOne, &BogusTwo).process_all::<ChildCounter>(&mut count);
        assert_eq!(3, count);
        count = 0;
        (BogusOne, (BogusOne,), BogusOne).process_all::<ChildCounter>(&mut count);
        assert_eq!(3, count);
        count = 0;
        [BogusOne, BogusOne, BogusOne].process_all::<ChildCounter>(&mut count);
        assert_eq!(3, count);
        count = 0;
        // Mixed nesting of tuples, arrays, Vec, and unit counts leaves only.
        (BogusOne, BogusOne,
            [BogusOne, BogusOne, BogusOne],
            [(BogusOne)],
            vec![(), (), ()],
            [&BogusTwo, &BogusTwo, &BogusTwo],
        ).process_all::<ChildCounter>(&mut count);
        assert_eq!(9, count);
        let sample = html_sample();
        count = 0;
        // `process_all` on a single node visits only the node itself...
        sample.process_all::<ChildCounter>(&mut count);
        assert_eq!(1, count);
        count = 0;
        // ...while `process_children` visits its direct children.
        sample.process_children::<ChildCounter>(&mut count);
        assert_eq!(4, count);
    }
    #[test]
    fn builds_string() {
        let mut string = String::new();
        html_sample().process_all::<HtmlStringBuilder>(&mut string);
        // Whitespace is stripped from the expectation so it can be laid out readably.
        assert_eq!(
            r#"
            <div>
                <bogus_tag_one></bogus_tag_one>
                <bogus_tag_one></bogus_tag_one>
                <bogus_tag_two></bogus_tag_two>
                <table>
                    something
                    <th></th>
                    <tr></tr>
                    <tr></tr>
                </table>
            </div>
            "#.chars().filter(|c| !c.is_whitespace()).collect::<String>(),
            string.to_lowercase()
        );
    }
    #[test]
    fn builds_attribute_list() {
        // Later `with_attributes` calls prepend, so attr1 sorts first.
        let div = div(())
            .with_attributes([("attr2", "key2"), ("attr3", "key3")])
            .with_attributes([("attr1", "key1")]);
        assert_eq!(div.get_attribute(0), Some(&("attr1", "key1")));
        assert_eq!(div.get_attribute(1), Some(&("attr2", "key2")));
        assert_eq!(div.get_attribute(2), Some(&("attr3", "key3")));
        assert_eq!(div.get_attribute(3), None);
        let mut attr_iter = div.attributes();
        assert_eq!(attr_iter.next(), Some(&("attr1", "key1")));
        assert_eq!(attr_iter.next(), Some(&("attr2", "key2")));
        assert_eq!(attr_iter.next(), Some(&("attr3", "key3")));
        assert_eq!(attr_iter.next(), None);
    }
}
Add test for alternative attribute syntax
#![feature(conservative_impl_trait)]
/// A node in the virtual DOM tree.
///
/// `Sized` is required so the builder method `with_attributes` can take
/// `self` by value.
pub trait DOMNode: Sized {
    /// Indexed attribute lookup; the default implementation has no attributes.
    fn get_attribute(&self, _index: usize) -> Option<&KeyValue> { None }
    /// Iterator over this node's attributes, in `get_attribute` index order.
    fn attributes<'a>(&'a self) -> AttributeIter<'a, Self> {
        AttributeIter { node: self, index: 0 }
    }
    /// Wraps `self`, prepending `attrs` before any attributes it already has.
    fn with_attributes<A: AsRef<[KeyValue]>>(self, attrs: A) -> WithAttributes<Self, A> {
        WithAttributes { node: self, attributes: attrs }
    }
    /// Feeds each direct child of this node to processor `P`, folding into `acc`.
    fn process_children<P: DOMNodeProcessor>(&self, acc: &mut P::Acc) -> () where Self: Sized;
    /// What this node renders as (element tag or text).
    fn value<'a>(&'a self) -> DOMValue<'a>;
}
/// An attribute as a `(name, value)` pair of static strings.
type KeyValue = (&'static str, &'static str);
/// What a node renders as: an element with a tag name, or raw text.
pub enum DOMValue<'a> {
    Element { tag: &'a str },
    Text(&'a str),
}
/// Adapter created by `DOMNode::with_attributes`.
pub struct WithAttributes<T: DOMNode, A: AsRef<[KeyValue]>> {
    node: T,
    attributes: A,
}
impl<T, A> DOMNode for WithAttributes<T, A> where T: DOMNode, A: AsRef<[KeyValue]> {
    // Indices below `attributes.len()` hit the new attributes; larger indices
    // fall through to the wrapped node. The subtraction cannot underflow
    // because `get` only returned `None` when `index >= attributes.len()`.
    fn get_attribute(&self, index: usize) -> Option<&KeyValue> {
        let attributes = self.attributes.as_ref();
        attributes
            .get(index)
            .or_else(|| self.node.get_attribute(index - attributes.len()))
    }
    // NOTE(review): does not delegate to `self.node.process_children`, so a
    // wrapped node's children are silently dropped — confirm this is intended.
    fn process_children<P: DOMNodeProcessor>(&self, _acc: &mut P::Acc) -> () {}
    fn value<'a>(&'a self) -> DOMValue<'a> { self.node.value() }
}
/// Iterator over a node's attributes; see `DOMNode::attributes`.
pub struct AttributeIter<'a, T: DOMNode + 'a> {
    node: &'a T,
    index: usize,
}
impl<'a, T: DOMNode> Iterator for AttributeIter<'a, T> {
    type Item = &'a (&'static str, &'static str);
    fn next(&mut self) -> Option<Self::Item> {
        // Ends when `get_attribute` first returns `None`.
        let res = self.node.get_attribute(self.index);
        self.index += 1;
        res
    }
}
// A reference to a node is itself a node (attributes default to none here).
impl<'a, T: DOMNode> DOMNode for &'a T {
    fn process_children<P: DOMNodeProcessor>(&self, acc: &mut P::Acc) -> () {
        (*self).process_children::<P>(acc);
    }
    fn value<'b>(&'b self) -> DOMValue<'b> { (*self).value() }
}
// Strings are leaf text nodes with no children.
impl DOMNode for String {
    fn process_children<P: DOMNodeProcessor>(&self, _acc: &mut P::Acc) -> () {}
    fn value<'a>(&'a self) -> DOMValue<'a> { DOMValue::Text(self) }
}
impl DOMNode for &'static str {
    fn process_children<P: DOMNodeProcessor>(&self, _acc: &mut P::Acc) -> () {}
    fn value<'a>(&'a self) -> DOMValue<'a> { DOMValue::Text(self) }
}
/// Builder functions for HTML tags (`div(...)`, `span(...)`, ...).
pub mod tags {
    use super::{DOMNode, DOMNodeProcessor, DOMNodes, DOMValue, KeyValue};

    /// Anything convertible into the `(children, attributes)` pair a tag
    /// constructor needs.
    pub trait TagProperties {
        type Children: DOMNodes;
        type Attributes: AsRef<[KeyValue]>;
        fn properties(self) -> (Self::Children, Self::Attributes);
    }

    /// Bare children: no attributes.
    impl<C: DOMNodes> TagProperties for C {
        type Children = Self;
        type Attributes = [KeyValue; 0];
        fn properties(self) -> (Self::Children, Self::Attributes) {
            (self, [])
        }
    }

    /// Marker wrapping an attribute list for the `(Attrs([...]), children)`
    /// tag syntax; the field is `pub` so callers can construct it directly.
    pub struct Attrs<A: AsRef<[KeyValue]>>(pub A);

    /// `(Attrs([...]), children)` alternative tag syntax.
    impl<C: DOMNodes, A: AsRef<[KeyValue]>> TagProperties for (Attrs<A>, C) {
        type Children = C;
        type Attributes = A;
        fn properties(self) -> (Self::Children, Self::Attributes) {
            (
                self.1,
                (self.0).0,
            )
        }
    }

    /// An element node: tag name plus children and attributes.
    pub struct Tag<C: DOMNodes, A: AsRef<[KeyValue]>> {
        tagname: &'static str,
        contents: C,
        attributes: A,
    }
    impl<C: DOMNodes, A: AsRef<[KeyValue]>> DOMNode for Tag<C, A> {
        fn get_attribute(&self, index: usize) -> Option<&KeyValue> {
            self.attributes.as_ref().get(index)
        }
        fn process_children<P: DOMNodeProcessor>(&self, acc: &mut P::Acc) -> () {
            self.contents.process_all::<P>(acc);
        }
        fn value<'a>(&'a self) -> DOMValue<'a> {
            DOMValue::Element {
                tag: self.tagname,
            }
        }
    }

    /// Generates one builder function per HTML tag; the function name is used
    /// verbatim (via `stringify!`) as the emitted tag name.
    macro_rules! impl_tags {
        ($($tagname:ident),*) => { $(
            pub fn $tagname<T: TagProperties>(properties: T) -> Tag<T::Children, T::Attributes> {
                let (contents, attributes) = properties.properties();
                Tag {
                    tagname: stringify!($tagname),
                    contents: contents,
                    attributes: attributes,
                }
            }
        )* }
    }
    // `framset` is a long-standing typo kept for backward compatibility;
    // prefer the correctly spelled `frameset`.
    impl_tags!(
        a, abbr, acronym, address, applet, area, article, aside, audio, b, base, basefont, bdi,
        bdo, big, blockquote, body, br, button, canvas, caption, center, cite, code, col, colgroup,
        datalist, dd, del, details, dfn, dialog, dir, div, dl, dt, em, embed, fieldset,
        figcaption, figure, font, footer, form, frame, frameset, framset, h1, h2, h3, h4, h5, h6,
        head, header, hr, i, iframe, img, input, ins, kbd, keygen, label, legend, li, link, main,
        map, mark, menu, menuitem, meta, meter, nav, noframes, noscript, object, ol, optgroup,
        option, output, p, param, pre, progress, q, rp, rt, ruby, s, samp, script, section,
        select, small, source, span, strike, strong, style, sub, summary, sup, table, tbody, td,
        textarea, tfoot, th, thead, time, title, tr, track, tt, u, ul, var, video, wbr
    );
}
/// Processor of a `DOMNode`
pub trait DOMNodeProcessor {
    /// Accumulator
    type Acc;
    /// Folding function
    fn get_processor<T: DOMNode>() -> fn(&mut Self::Acc, &T) -> ();
}
/// A collection of zero or more `DOMNode`s that can be processed in order.
pub trait DOMNodes {
    fn process_all<P: DOMNodeProcessor>(&self, acc: &mut P::Acc) -> ();
}
// The empty collection: processing is a no-op.
impl DOMNodes for () {
    fn process_all<P: DOMNodeProcessor>(&self, _acc: &mut P::Acc) -> () {}
}
// A single node is a one-element collection: feed it to the processor.
impl<T: DOMNode> DOMNodes for T {
    fn process_all<P: DOMNodeProcessor>(&self, acc: &mut P::Acc) -> () {
        P::get_processor()(acc, self);
    }
}
impl<T: DOMNodes> DOMNodes for [T] {
    fn process_all<P: DOMNodeProcessor>(&self, acc: &mut P::Acc) -> () {
        for x in self {
            x.process_all::<P>(acc);
        }
    }
}
impl<T: DOMNodes> DOMNodes for Vec<T> {
    fn process_all<P: DOMNodeProcessor>(&self, acc: &mut P::Acc) -> () {
        for x in self {
            x.process_all::<P>(acc);
        }
    }
}
// Fixed-size arrays need one impl per length (pre-const-generics Rust).
macro_rules! array_impls {
    ($($len:expr,)*) => { $(
        impl<T: DOMNodes> DOMNodes for [T; $len] {
            fn process_all<P: DOMNodeProcessor>(&self, acc: &mut P::Acc) -> () {
                for x in self {
                    x.process_all::<P>(acc);
                }
            }
        }
    )* }
}
array_impls!(
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
    17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
);
// Credit to @shepmaster for structure of recursive tuple macro
macro_rules! tuple_impls {
() => {};
// Copywrite @shepmaster
(($idx:tt => $typ:ident), $( ($nidx:tt => $ntyp:ident), )*) => {
/*
* Invoke recursive reversal of list that ends in the macro expansion implementation
* of the reversed list
*/
tuple_impls!([($idx, $typ);] $( ($nidx => $ntyp), )*);
tuple_impls!($( ($nidx => $ntyp), )*); // invoke macro on tail
};
/*
* ([accumulatedList], listToReverse); recursively calls tuple_impls until the list to reverse
+ is empty (see next pattern)
*/
([$(($accIdx: tt, $accTyp: ident);)+] ($idx:tt => $typ:ident), $( ($nidx:tt => $ntyp:ident), )*) => {
tuple_impls!([($idx, $typ); $(($accIdx, $accTyp); )*] $( ($nidx => $ntyp), ) *);
};
// Finally expand into the implementation
([($idx:tt, $typ:ident); $( ($nidx:tt, $ntyp:ident); )*]) => {
impl<$typ, $( $ntyp ),*> DOMNodes for ($typ, $( $ntyp ),*)
where $typ: DOMNodes,
$( $ntyp: DOMNodes),*
{
fn process_all<P>(&self, acc: &mut P::Acc) -> ()
where P: DOMNodeProcessor {
&self.$idx.process_all::<P>(acc);
$(
&self.$nidx.process_all::<P>(acc);
)*
}
}
}
}
tuple_impls!(
(9 => J),
(8 => I),
(7 => H),
(6 => G),
(5 => F),
(4 => E),
(3 => D),
(2 => C),
(1 => B),
(0 => A),
);
/// Renders a `DOMNode` tree to an HTML string.
pub mod html_string {
    use super::{DOMNode, DOMNodeProcessor, DOMValue};
    /// Processor whose accumulator is the output `String`.
    pub struct HtmlStringBuilder;
    impl DOMNodeProcessor for HtmlStringBuilder {
        type Acc = String;
        fn get_processor<T: DOMNode>() -> fn(&mut Self::Acc, &T) -> () {
            // Appends one node — and, by recursing through `process_children`,
            // its whole subtree — to `string`.
            fn add_node<T: DOMNode>(string: &mut String, node: &T) {
                match node.value() {
                    DOMValue::Element { tag: tagname } => {
                        // Opening tag with attributes: <tag k="v" ...>
                        string.push_str("<");
                        string.push_str(tagname);
                        for attr in node.attributes() {
                            string.push_str(" ");
                            string.push_str(attr.0);
                            string.push_str("=\"");
                            string.push_str(attr.1);
                            string.push_str("\"")
                        }
                        string.push_str(">");
                        node.process_children::<HtmlStringBuilder>(string);
                        // Closing tag: </tag>
                        string.push_str("</");
                        string.push_str(tagname);
                        string.push_str(">");
                    }
                    DOMValue::Text(text) => {
                        // TODO: HTML escaping
                        string.push_str(text);
                    }
                }
            }
            add_node
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use super::tags::*;
    use super::html_string::*;
    // Two dummy element types so tests can distinguish tags in output.
    struct BogusOne;
    impl DOMNode for BogusOne {
        fn process_children<P: DOMNodeProcessor>(&self, _acc: &mut P::Acc) -> () {}
        fn value<'a>(&'a self) -> DOMValue<'a> {
            DOMValue::Element { tag: "bogus_tag_one" }
        }
    }
    struct BogusTwo;
    impl DOMNode for BogusTwo {
        fn process_children<P: DOMNodeProcessor>(&self, _acc: &mut P::Acc) -> () {}
        fn value<'a>(&'a self) -> DOMValue<'a> {
            DOMValue::Element { tag: "bogus_tag_two" }
        }
    }
    // Processor that simply counts the nodes it visits.
    struct ChildCounter;
    impl DOMNodeProcessor for ChildCounter {
        type Acc = usize;
        fn get_processor<T: DOMNode>() -> fn(&mut Self::Acc, &T) -> () {
            fn incr<T: DOMNode>(count: &mut usize, _node: &T) {
                *count += 1;
            }
            incr
        }
    }
    // A small fixed tree exercising tags, text, and nesting.
    fn html_sample() -> impl DOMNode {
        div ((
            BogusOne,
            BogusOne,
            BogusTwo,
            table ((
                "something",
                th (()),
                tr (()),
                tr (()),
            )),
        ))
    }
    #[test]
    fn counts_children() {
        let mut count = 0;
        (BogusOne, &BogusOne, &BogusTwo).process_all::<ChildCounter>(&mut count);
        assert_eq!(3, count);
        count = 0;
        (BogusOne, (BogusOne,), BogusOne).process_all::<ChildCounter>(&mut count);
        assert_eq!(3, count);
        count = 0;
        [BogusOne, BogusOne, BogusOne].process_all::<ChildCounter>(&mut count);
        assert_eq!(3, count);
        count = 0;
        // Mixed nesting of tuples, arrays, Vec, and unit counts leaves only.
        (BogusOne, BogusOne,
            [BogusOne, BogusOne, BogusOne],
            [(BogusOne)],
            vec![(), (), ()],
            [&BogusTwo, &BogusTwo, &BogusTwo],
        ).process_all::<ChildCounter>(&mut count);
        assert_eq!(9, count);
        let sample = html_sample();
        count = 0;
        // `process_all` on a single node visits only the node itself...
        sample.process_all::<ChildCounter>(&mut count);
        assert_eq!(1, count);
        count = 0;
        // ...while `process_children` visits its direct children.
        sample.process_children::<ChildCounter>(&mut count);
        assert_eq!(4, count);
    }
    #[test]
    fn builds_string() {
        let mut string = String::new();
        html_sample().process_all::<HtmlStringBuilder>(&mut string);
        // Whitespace is stripped from the expectation so it can be laid out readably.
        assert_eq!(
            r#"
            <div>
                <bogus_tag_one></bogus_tag_one>
                <bogus_tag_one></bogus_tag_one>
                <bogus_tag_two></bogus_tag_two>
                <table>
                    something
                    <th></th>
                    <tr></tr>
                    <tr></tr>
                </table>
            </div>
            "#.chars().filter(|c| !c.is_whitespace()).collect::<String>(),
            string.to_lowercase()
        );
    }
    // Asserts the canonical three-attribute layout produced by either syntax.
    fn check_attribute_list<T: DOMNode>(div: T) {
        assert_eq!(div.get_attribute(0), Some(&("attr1", "val1")));
        assert_eq!(div.get_attribute(1), Some(&("attr2", "val2")));
        assert_eq!(div.get_attribute(2), Some(&("attr3", "val3")));
        assert_eq!(div.get_attribute(3), None);
        let mut attr_iter = div.attributes();
        assert_eq!(attr_iter.next(), Some(&("attr1", "val1")));
        assert_eq!(attr_iter.next(), Some(&("attr2", "val2")));
        assert_eq!(attr_iter.next(), Some(&("attr3", "val3")));
        assert_eq!(attr_iter.next(), None);
    }
    #[test]
    fn builds_attribute_list() {
        // Builder syntax: later `with_attributes` calls prepend.
        let div1 = div(())
            .with_attributes([("attr2", "val2"), ("attr3", "val3")])
            .with_attributes([("attr1", "val1")]);
        check_attribute_list(div1);
        // Alternative `Attrs` tuple syntax combined with the builder.
        let div2 = div((
            Attrs([("attr2", "val2"), ("attr3", "val3")]),
            div(())
        )).with_attributes([("attr1", "val1")]);
        check_attribute_list(div2);
    }
}
|
//! RethinkDB protocol implementation in Rust
extern crate protobuf;
extern crate serde_json;
pub mod proto;
use serde_json::value::{Value, ToJson};
use protobuf::repeated::RepeatedField;
use proto::{
Term, Datum,
Datum_DatumType as DT,
Term_TermType as TT,
};
impl Term {
    /// True when this term is indistinguishable from a default-constructed one.
    fn is_empty(&self) -> bool {
        *self == Term::new()
    }
}
/// Conversion of a value into a ReQL protobuf `Term`.
pub trait ToTerm {
    fn to_term(&self) -> Term;
}
// Any JSON-convertible value becomes a DATUM term.
impl<T: ToJson> ToTerm for T {
    fn to_term(&self) -> Term {
        // Datum
        let mut datum = Datum::new();
        match self.to_json() {
            Value::String(val) => {
                datum.set_field_type(DT::R_STR);
                datum.set_r_str(val);
            },
            Value::Bool(val) => {
                datum.set_field_type(DT::R_BOOL);
                datum.set_r_bool(val);
            },
            // All JSON number variants collapse onto R_NUM (f64 on the wire).
            Value::I64(val) => {
                datum.set_field_type(DT::R_NUM);
                datum.set_r_num(val as f64);
            },
            Value::U64(val) => {
                datum.set_field_type(DT::R_NUM);
                datum.set_r_num(val as f64);
            },
            Value::F64(val) => {
                datum.set_field_type(DT::R_NUM);
                datum.set_r_num(val);
            },
            Value::Array(_val) => {
                unimplemented!();
            },
            Value::Object(_val) => {
                unimplemented!();
            },
            Value::Null => {
                datum.set_field_type(DT::R_NULL);
            },
        }
        // Term
        let mut term = Term::new();
        term.set_field_type(TT::DATUM);
        term.set_datum(datum);
        term
    }
}
// Terms convert to themselves by cloning.
impl ToTerm for Term {
    fn to_term(&self) -> Term {
        self.clone()
    }
}
// Builds a command term of type $T: the receiver $cmd (skipped when it is
// the empty root term) is prepended to the optional $args list.
// $opts handling is not implemented yet — see the commented block below.
macro_rules! command {
    ($T:expr, $cmd:expr, $args:expr, $opts:expr) => {{
        let mut term = Term::new();
        term.set_field_type($T);
        let mut args = Vec::new();
        // The bare root `r` (an empty Term) contributes no argument.
        if !$cmd.is_empty() {
            args.push($cmd);
        }
        if let Some(list) = $args {
            args.extend(list);
        }
        /*
        if let Some(_opt) = $opts {
            unimplemented!();
        }
        */
        if !args.is_empty() {
            let args = RepeatedField::from_vec(args);
            term.set_args(args);
        }
        From::from(term)
    }}
}
// Builds the VAR term referring to closure parameter 1.
macro_rules! closure_par {
    () => {{
        // ID
        let mut id = Datum::new();
        id.set_field_type(DT::R_NUM);
        id.set_r_num(1.0);
        // DATUM
        let mut datum = Term::new();
        datum.set_field_type(TT::DATUM);
        datum.set_datum(id);
        // VAR
        let mut var = Term::new();
        var.set_field_type(TT::VAR);
        let args = RepeatedField::from_vec(vec![datum]);
        var.set_args(args);
        From::from(var)
    }}
}
// Wraps $res into a FUNC term whose parameter-id list is [1], pairing with
// the VAR built by closure_par!. NOTE(review): currently unused in this file.
macro_rules! closure_arg {
    ($res:expr) => {{
        // ID
        let mut id = Datum::new();
        id.set_field_type(DT::R_NUM);
        id.set_r_num(1.0);
        // ARRAY
        let mut array = Datum::new();
        array.set_field_type(DT::R_ARRAY);
        let args = RepeatedField::from_vec(vec![id]);
        array.set_r_array(args);
        // DATUM
        let mut datum = Term::new();
        datum.set_field_type(TT::DATUM);
        datum.set_datum(array);
        // FUNC
        let mut func = Term::new();
        func.set_field_type(TT::FUNC);
        let args = RepeatedField::from_vec(vec![datum, $res]);
        func.set_args(args);
        func
    }}
}
/// Chainable ReQL query builders, implemented for anything convertible to
/// and from `Term`.
pub trait Command
    where Self: Sized + From<Term> + Into<Term>
{
    fn db<T: ToTerm>(self, arg: T) -> Self {
        let cmd = self.into();
        let arg = arg.to_term();
        command!(TT::DB, cmd, Some(vec![arg]), None)
    }
    fn table<T: ToTerm>(self, arg: T) -> Self {
        let cmd = self.into();
        let arg = arg.to_term();
        command!(TT::TABLE, cmd, Some(vec![arg]), None)
    }
    fn get_field<T: ToTerm>(self, arg: T) -> Self {
        let cmd = self.into();
        let arg = arg.to_term();
        command!(TT::GET_FIELD, cmd, Some(vec![arg]), None)
    }
    /// Maps `func` over a sequence; the closure receives the `var(1)` term
    /// built by `closure_par!`.
    fn map<F>(self, func: F) -> Self
        where F: Fn(Self) -> Self
    {
        let cmd = self.into();
        let arg = func(closure_par!()).into();
        command!(TT::MAP, cmd, Some(vec![arg]), None)
    }
    /// Builds a MAKE_ARRAY term from a heterogeneous list of convertible values.
    fn array(self, arg: Vec<&ToTerm>) -> Self {
        let cmd = self.into();
        let args: Vec<Term> = arg.iter()
            .map(|a| a.to_term())
            .collect();
        command!(TT::MAKE_ARRAY, cmd, Some(args), None)
    }
}
#[test]
fn test_commands_can_be_chained() {
    impl Command for Term { }
    let r = Term::new();
    let term = r.db("heroes").table("marvel").map(|row| row.get_field("first_appearance"));
    // Deliberate panic so the built term is printed by the test harness.
    // `panic!` takes format arguments directly — no intermediate `format!`
    // String allocation needed (clippy: panic_params).
    panic!("{:?}", term);
}
Encode simple terms
//! RethinkDB protocol implementation in Rust
extern crate protobuf;
extern crate serde_json;
pub mod proto;
use serde_json::value::{Value, ToJson};
use protobuf::repeated::RepeatedField;
use protobuf::ProtobufEnum;
use proto::{
Term, Datum,
Datum_DatumType as DT,
Term_TermType as TT,
};
impl Term {
    /// True when this term is indistinguishable from a default-constructed one.
    fn is_empty(&self) -> bool {
        *self == Term::new()
    }
    /// True when this term is a literal DATUM (encoded bare, without the
    /// `[type, args]` wrapper).
    fn is_datum(&self) -> bool {
        self.get_field_type() == TT::DATUM
    }
    /// Serializes this term to the JSON wire format: datums are emitted bare,
    /// everything else as `[type_number,[encoded_args...]]`.
    fn encode(&self) -> String {
        let mut res = String::new();
        if !self.is_datum() {
            res.push_str(&format!("[{},", self.get_field_type().value()));
        }
        let terms = self.get_args();
        if !terms.is_empty() {
            // Recursively encode the argument list, comma-separated,
            // dropping the trailing comma before closing the bracket.
            let mut args = String::from("[");
            for term in terms {
                args.push_str(&format!("{},", term.encode()));
            }
            args = args.trim_right_matches(",").to_string();
            args.push_str("]");
            res.push_str(&args);
        }
        if self.has_datum() {
            let datum = self.get_datum();
            match datum.get_field_type() {
                DT::R_NULL => {
                    unimplemented!();
                },
                DT::R_BOOL => {
                    if datum.has_r_bool() {
                        res.push_str(&format!("{:?}", datum.get_r_bool()));
                    } else {
                        unimplemented!();
                    }
                },
                DT::R_NUM => {
                    if datum.has_r_num() {
                        res.push_str(&format!("{}", datum.get_r_num()));
                    } else {
                        unimplemented!();
                    }
                },
                DT::R_STR => {
                    // `{:?}` both quotes and escapes the string.
                    if datum.has_r_str() {
                        res.push_str(&format!("{:?}", datum.get_r_str()));
                    } else {
                        unimplemented!();
                    }
                },
                DT::R_ARRAY => {
                    unimplemented!();
                },
                DT::R_OBJECT => {
                    unimplemented!();
                },
                DT::R_JSON => {
                    unimplemented!();
                },
            }
        }
        if !self.is_datum() {
            res.push_str("]");
        }
        res
    }
}
/// Conversion of a value into a ReQL protobuf `Term`.
pub trait ToTerm {
    fn to_term(&self) -> Term;
}
// Any JSON-convertible value becomes a DATUM term.
impl<T: ToJson> ToTerm for T {
    fn to_term(&self) -> Term {
        // Datum
        let mut datum = Datum::new();
        match self.to_json() {
            Value::String(val) => {
                datum.set_field_type(DT::R_STR);
                datum.set_r_str(val);
            },
            Value::Bool(val) => {
                datum.set_field_type(DT::R_BOOL);
                datum.set_r_bool(val);
            },
            // All JSON number variants collapse onto R_NUM (f64 on the wire).
            Value::I64(val) => {
                datum.set_field_type(DT::R_NUM);
                datum.set_r_num(val as f64);
            },
            Value::U64(val) => {
                datum.set_field_type(DT::R_NUM);
                datum.set_r_num(val as f64);
            },
            Value::F64(val) => {
                datum.set_field_type(DT::R_NUM);
                datum.set_r_num(val);
            },
            Value::Array(_val) => {
                unimplemented!();
            },
            Value::Object(_val) => {
                unimplemented!();
            },
            Value::Null => {
                datum.set_field_type(DT::R_NULL);
            },
        }
        // Term
        let mut term = Term::new();
        term.set_field_type(TT::DATUM);
        term.set_datum(datum);
        term
    }
}
// Terms convert to themselves by cloning.
impl ToTerm for Term {
    fn to_term(&self) -> Term {
        self.clone()
    }
}
// Builds a command term of type $T: the receiver $cmd (skipped when it is
// the empty root term) is prepended to the optional $args list.
// $opts handling is not implemented yet — see the commented block below.
macro_rules! command {
    ($T:expr, $cmd:expr, $args:expr, $opts:expr) => {{
        let mut term = Term::new();
        term.set_field_type($T);
        let mut args = Vec::new();
        // The bare root `r` (an empty Term) contributes no argument.
        if !$cmd.is_empty() {
            args.push($cmd);
        }
        if let Some(list) = $args {
            args.extend(list);
        }
        /*
        if let Some(_opt) = $opts {
            unimplemented!();
        }
        */
        if !args.is_empty() {
            let args = RepeatedField::from_vec(args);
            term.set_args(args);
        }
        From::from(term)
    }}
}
// Builds the VAR term referring to closure parameter 1.
macro_rules! closure_par {
    () => {{
        // ID
        let mut id = Datum::new();
        id.set_field_type(DT::R_NUM);
        id.set_r_num(1.0);
        // DATUM
        let mut datum = Term::new();
        datum.set_field_type(TT::DATUM);
        datum.set_datum(id);
        // VAR
        let mut var = Term::new();
        var.set_field_type(TT::VAR);
        let args = RepeatedField::from_vec(vec![datum]);
        var.set_args(args);
        From::from(var)
    }}
}
// Wraps $res into a FUNC term whose parameter-id list is [1], pairing with
// the VAR built by closure_par!. NOTE(review): currently unused in this file.
macro_rules! closure_arg {
    ($res:expr) => {{
        // ID
        let mut id = Datum::new();
        id.set_field_type(DT::R_NUM);
        id.set_r_num(1.0);
        // ARRAY
        let mut array = Datum::new();
        array.set_field_type(DT::R_ARRAY);
        let args = RepeatedField::from_vec(vec![id]);
        array.set_r_array(args);
        // DATUM
        let mut datum = Term::new();
        datum.set_field_type(TT::DATUM);
        datum.set_datum(array);
        // FUNC
        let mut func = Term::new();
        func.set_field_type(TT::FUNC);
        let args = RepeatedField::from_vec(vec![datum, $res]);
        func.set_args(args);
        func
    }}
}
/// Chainable ReQL query builders, implemented for anything convertible to
/// and from `Term`.
pub trait Command
    where Self: Sized + From<Term> + Into<Term>
{
    fn db<T: ToTerm>(self, arg: T) -> Self {
        let cmd = self.into();
        let arg = arg.to_term();
        command!(TT::DB, cmd, Some(vec![arg]), None)
    }
    fn table<T: ToTerm>(self, arg: T) -> Self {
        let cmd = self.into();
        let arg = arg.to_term();
        command!(TT::TABLE, cmd, Some(vec![arg]), None)
    }
    fn get_field<T: ToTerm>(self, arg: T) -> Self {
        let cmd = self.into();
        let arg = arg.to_term();
        command!(TT::GET_FIELD, cmd, Some(vec![arg]), None)
    }
    /// Maps `func` over a sequence; the closure receives the `var(1)` term
    /// built by `closure_par!`.
    fn map<F>(self, func: F) -> Self
        where F: Fn(Self) -> Self
    {
        let cmd = self.into();
        let arg = func(closure_par!()).into();
        command!(TT::MAP, cmd, Some(vec![arg]), None)
    }
    /// Builds a MAKE_ARRAY term from a heterogeneous list of convertible values.
    fn array(self, arg: Vec<&ToTerm>) -> Self {
        let cmd = self.into();
        let args: Vec<Term> = arg.iter()
            .map(|a| a.to_term())
            .collect();
        command!(TT::MAKE_ARRAY, cmd, Some(args), None)
    }
}
#[test]
fn test_commands_can_be_chained() {
    impl Command for Term { }
    let r = Term::new();
    let term = r.db("heroes").table("marvel").map(|row| row.get_field("first_appearance"));
    let term: Term = term.into();
    // Deliberate panic so both the debug form and the wire encoding are
    // printed by the test harness. `panic!` takes format arguments directly —
    // no intermediate `format!` String allocation needed (clippy: panic_params).
    panic!("{:?}\n\n{}", term, term.encode());
}
|
extern crate iron;
extern crate r2d2;
extern crate r2d2_postgres;
extern crate postgres;
use iron::prelude::*;
use iron::{typemap, BeforeMiddleware};
use std::sync::Arc;
use std::default::Default;
use postgres::{SslMode};
use r2d2_postgres::PostgresConnectionManager;
/// Iron middleware that shares one Postgres connection pool across requests.
pub struct PostgresMiddleware {
    pub pool: Arc<r2d2::Pool<r2d2_postgres::PostgresConnectionManager, r2d2::LoggingErrorHandler>>,
}
/// Newtype stored in each request's extensions typemap.
struct Value(Arc<r2d2::Pool<r2d2_postgres::PostgresConnectionManager, r2d2::LoggingErrorHandler>>);
impl typemap::Key for PostgresMiddleware { type Value = Value; }
impl PostgresMiddleware {
    /// Builds the middleware with a default pool config.
    ///
    /// Panics if the pool cannot be created (e.g. bad connection string).
    pub fn new(pg_connection_str: &str) -> PostgresMiddleware {
        let config = Default::default();
        let manager = PostgresConnectionManager::new(pg_connection_str, SslMode::None);
        let error_handler = r2d2::LoggingErrorHandler;
        let pool = Arc::new(r2d2::Pool::new(config, manager, error_handler).unwrap());
        PostgresMiddleware {
            pool: pool,
        }
    }
}
// Clones the (cheap, Arc-backed) pool handle into every request before
// handlers run, so handlers can fetch it via `PostgresReqExt::db_conn`.
impl BeforeMiddleware for PostgresMiddleware {
    fn before(&self, req: &mut Request) -> IronResult<()> {
        req.extensions.insert::<PostgresMiddleware>(Value(self.pool.clone()));
        Ok(())
    }
}
/// Extension trait giving request handlers access to a pooled DB connection.
pub trait PostgresReqExt {
    fn db_conn(&self) -> r2d2::PooledConnection<r2d2_postgres::PostgresConnectionManager,
                                                r2d2::LoggingErrorHandler>;
}
impl<'a> PostgresReqExt for Request<'a> {
    /// Checks out a connection from the pool stored by `PostgresMiddleware`.
    /// Panics if the middleware was not installed or the pool checkout fails.
    fn db_conn(&self) -> r2d2::PooledConnection<r2d2_postgres::PostgresConnectionManager,
                                                r2d2::LoggingErrorHandler> {
        let &Value(ref pool) = self.extensions.get::<PostgresMiddleware>().unwrap();
        pool.get().unwrap()
    }
}
Update for changes in r2d2
extern crate iron;
extern crate r2d2;
extern crate r2d2_postgres;
extern crate postgres;
use iron::prelude::*;
use iron::{typemap, BeforeMiddleware};
use std::sync::Arc;
use std::default::Default;
use postgres::{SslMode};
use r2d2_postgres::PostgresConnectionManager;
/// Iron middleware that shares one Postgres connection pool across requests.
pub struct PostgresMiddleware {
    pub pool: Arc<r2d2::Pool<r2d2_postgres::PostgresConnectionManager>>,
}
/// Newtype stored in each request's extensions typemap.
struct Value(Arc<r2d2::Pool<r2d2_postgres::PostgresConnectionManager>>);
impl typemap::Key for PostgresMiddleware { type Value = Value; }
impl PostgresMiddleware {
    /// Builds the middleware with a default pool config.
    ///
    /// Panics if the pool cannot be created (e.g. bad connection string).
    pub fn new(pg_connection_str: &str) -> PostgresMiddleware {
        let config = Default::default();
        let manager = PostgresConnectionManager::new(pg_connection_str, SslMode::None);
        let error_handler = r2d2::LoggingErrorHandler;
        // Newer r2d2 takes the error handler boxed.
        let pool = Arc::new(r2d2::Pool::new(config, manager, Box::new(error_handler)).unwrap());
        PostgresMiddleware {
            pool: pool,
        }
    }
}
// Clones the (cheap, Arc-backed) pool handle into every request before
// handlers run, so handlers can fetch it via `PostgresReqExt::db_conn`.
impl BeforeMiddleware for PostgresMiddleware {
    fn before(&self, req: &mut Request) -> IronResult<()> {
        req.extensions.insert::<PostgresMiddleware>(Value(self.pool.clone()));
        Ok(())
    }
}
/// Extension trait giving request handlers access to a pooled DB connection.
pub trait PostgresReqExt {
    fn db_conn(&self) -> r2d2::PooledConnection<r2d2_postgres::PostgresConnectionManager>;
}
impl<'a> PostgresReqExt for Request<'a> {
    /// Checks out a connection from the pool stored by `PostgresMiddleware`.
    /// Panics if the middleware was not installed or the pool checkout fails.
    fn db_conn(&self) -> r2d2::PooledConnection<r2d2_postgres::PostgresConnectionManager> {
        let &Value(ref pool) = self.extensions.get::<PostgresMiddleware>().unwrap();
        pool.get().unwrap()
    }
}
|
//! This crate provides Rust bindings for the
//! [`TensorFlow`](https://www.tensorflow.org) machine learning library.
//!
//! If you aren't sure how to use something, please see the
//! [examples](https://github.com/tensorflow/rust/tree/master/examples) folder.
#![warn(
missing_copy_implementations,
missing_debug_implementations,
missing_docs,
trivial_casts,
trivial_numeric_casts,
unused_extern_crates,
unused_import_braces,
unused_qualifications
)]
use half::f16;
use libc::{c_int, c_uint};
use num_complex::Complex;
use std::cell::Cell;
use std::cell::RefCell;
use std::cmp::Ordering;
use std::error::Error;
use std::ffi::CStr;
use std::ffi::CString;
use std::ffi::IntoStringError;
use std::ffi::NulError;
use std::fmt;
use std::fmt::Debug;
use std::fmt::Display;
use std::fmt::Formatter;
use std::marker::PhantomData;
use std::mem;
use std::ops::Deref;
use std::ops::DerefMut;
use std::ops::Drop;
use std::ops::Index;
use std::os::raw::c_char;
use std::ptr;
use std::slice;
use std::str::Utf8Error;
use tensorflow_sys as tf;
////////////////////////
/// Will panic if `msg` contains an embedded 0 byte.
/// Builds a `Status` with `Code::InvalidArgument` and the given message.
///
/// Will panic if `msg` contains an embedded 0 byte.
macro_rules! invalid_arg {
    // Plain message expression, used as-is.
    ($fmt:expr) => {
        crate::Status::new_set(crate::Code::InvalidArgument, $fmt).unwrap()
    };
    // Format string plus arguments, formatted before building the status.
    ($fmt:expr, $($arg:tt)*) => ({
        let msg = format!($fmt, $($arg)*);
        crate::Status::new_set(crate::Code::InvalidArgument, &msg).unwrap()
    });
}
////////////////////////
// Generates an inherent `new()` constructor that wraps the raw pointer
// returned by the given tensorflow-sys allocation function.
macro_rules! impl_new {
    ($name: ident, $call:ident, $doc:expr) => {
        impl $name {
            #[doc = $doc]
            pub fn new() -> Self {
                unsafe {
                    let inner = tf::$call();
                    // The C allocation functions are not expected to return
                    // null; treat that as a fatal invariant violation.
                    assert!(!inner.is_null());
                    $name { inner: inner }
                }
            }
        }
    };
}
////////////////////////
// Generates a `Drop` impl that releases the wrapped C object via the given
// tensorflow-sys deletion function.
macro_rules! impl_drop {
    ($name: ident, $call:ident) => {
        impl Drop for $name {
            fn drop(&mut self) {
                unsafe {
                    tf::$call(self.inner);
                }
            }
        }
    };
}
////////////////////////
// Defines a Rust enum mirroring a C enum from tensorflow-sys.  Values the
// crate doesn't know about survive a round trip through
// `UnrecognizedEnumValue`, so newer C-side values are not lost.
macro_rules! c_enum {
    ($c_name:ident, $(#[$enum_attr:meta])* $enum_name:ident { $( $(#[$attr:meta])*
        $name:ident = $num:expr),* }) => {
        $(#[$enum_attr])*
        #[derive(PartialEq,Eq,PartialOrd,Ord,Debug,Copy,Clone)]
        pub enum $enum_name {
            /// Represents an unrecognized value.
            ///
            /// This allows such values to come from the C API and be sent back to the
            /// C API without loss in case new values are added in the future.
            UnrecognizedEnumValue(c_uint),
            $($(#[$attr])* $name),*
        }

        impl $enum_name {
            // Maps a raw integer to the matching variant, preserving unknown
            // values in UnrecognizedEnumValue.
            #[allow(dead_code)]
            fn from_int(value: c_uint) -> $enum_name {
                match value {
                    $($num => $enum_name::$name,)*
                    c => $enum_name::UnrecognizedEnumValue(c),
                }
            }

            // Inverse of from_int.
            #[allow(dead_code)]
            fn to_int(&self) -> c_uint {
                match self {
                    &$enum_name::UnrecognizedEnumValue(c) => c,
                    $(&$enum_name::$name => $num),*
                }
            }

            // NOTE(review): assumes the C enum's representation is exactly
            // c_uint-sized; the transmute would be unsound otherwise.
            #[allow(dead_code)]
            fn to_c(&self) -> tf::$c_name {
                unsafe {
                    ::std::mem::transmute(self.to_int())
                }
            }

            #[allow(dead_code)]
            fn from_c(value: tf::$c_name) -> $enum_name {
                $enum_name::from_int(value as c_uint)
            }
        }

        impl ::std::fmt::Display for $enum_name {
            fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
                match self {
                    $(&$enum_name::$name => f.write_str(stringify!($name)),)*
                    &$enum_name::UnrecognizedEnumValue(c) => write!(f, "UnrecognizedEnumValue({})", c),
                }
            }
        }
    };
    // Same as above, but tolerates a trailing comma in the variant list.
    ($c_name:ident, $(#[$enum_attr:meta])* $enum_name:ident { $( $(#[$attr:meta])*
        $name:ident = $num:expr,)* }) => {
        c_enum!($c_name, $(#[$enum_attr])* $enum_name { $( $(#[$attr])* $name = $num),* });
    };
    // Deprecated pattern.
    ($doc:expr, $c_name:ident, $(#[$enum_attr:meta])* $enum_name:ident { $( $(#[$attr:meta])* value
        $name:ident = $num:expr),* }) => {
        c_enum!($c_name, #[doc = $doc] $(#[$enum_attr])*
            $enum_name { $( $(#[$attr])* $name = $num),* });
    };
    // Deprecated pattern.
    ($doc:expr, $c_name:ident, $(#[$enum_attr:meta])* $enum_name:ident { $( $(#[$attr:meta])* value
        $name:ident = $num:expr,)* }) => {
        c_enum!($c_name, #[doc = $doc] $(#[$enum_attr])*
            $enum_name { $( $(#[$attr])* $name = $num),* });
    }
}
////////////////////////
mod buffer;
use crate::buffer::Buffer;
mod graph;
pub use crate::graph::*;
#[cfg(feature = "experimental_training")]
mod scope;
#[cfg(feature = "experimental_training")]
pub use crate::scope::*;
mod session;
pub use crate::session::*;
pub mod expr;
pub mod io;
#[cfg(feature = "experimental_training")]
pub mod ops;
////////////////////////
// Mirrors TF_Code from the TensorFlow C API (uses the deprecated
// string-doc pattern of c_enum!).  The numeric values must stay in sync
// with the C headers.
c_enum!("Error values that can be returned.", TF_Code, Code {
    /// Not an error; returned on success.
    value Ok = 0,

    /// The operation was cancelled (typically by the caller).
    value Cancelled = 1,

    /// Unknown error. An example of where this error may be returned is
    /// if a Status value received from another address space belongs to
    /// an error-space that is not known in this address space. Also
    /// errors raised by APIs that do not return enough error information
    /// may be converted to this error.
    value Unknown = 2,

    /// Client specified an invalid argument. Note that this differs
    /// from FAILED_PRECONDITION. INVALID_ARGUMENT indicates arguments
    /// that are problematic regardless of the state of the system
    /// (e.g., a malformed file name).
    value InvalidArgument = 3,

    /// Deadline expired before operation could complete. For operations
    /// that change the state of the system, this error may be returned
    /// even if the operation has completed successfully. For example, a
    /// successful response from a server could have been delayed long
    /// enough for the deadline to expire.
    value DeadlineExceeded = 4,

    /// Some requested entity (e.g., file or directory) was not found.
    /// For privacy reasons, this code *may* be returned when the client
    /// does not have the access right to the entity.
    value NotFound = 5,

    /// Some entity that we attempted to create (e.g., file or directory)
    /// already exists.
    value AlreadyExists = 6,

    /// The caller does not have permission to execute the specified
    /// operation. PERMISSION_DENIED must not be used for rejections
    /// caused by exhausting some resource (use RESOURCE_EXHAUSTED
    /// instead for those errors). PERMISSION_DENIED must not be
    /// used if the caller can not be identified (use UNAUTHENTICATED
    /// instead for those errors).
    value PermissionDenied = 7,

    /// Some resource has been exhausted, perhaps a per-user quota, or
    /// perhaps the entire file system is out of space.
    value ResourceExhausted = 8,

    /// Operation was rejected because the system is not in a state
    /// required for the operation's execution. For example, directory
    /// to be deleted may be non-empty, an rmdir operation is applied to
    /// a non-directory, etc.
    ///
    /// A litmus test that may help a service implementor in deciding
    /// between FAILED_PRECONDITION, ABORTED, and UNAVAILABLE:
    /// (a) Use UNAVAILABLE if the client can retry just the failing call.
    /// (b) Use ABORTED if the client should retry at a higher-level
    /// (e.g., restarting a read-modify-write sequence).
    /// (c) Use FAILED_PRECONDITION if the client should not retry until
    /// the system state has been explicitly fixed. E.g., if an "rmdir"
    /// fails because the directory is non-empty, FAILED_PRECONDITION
    /// should be returned since the client should not retry unless
    /// they have first fixed up the directory by deleting files from it.
    /// (d) Use FAILED_PRECONDITION if the client performs conditional
    /// REST Get/Update/Delete on a resource and the resource on the
    /// server does not match the condition. E.g., conflicting
    /// read-modify-write on the same resource.
    value FailedPrecondition = 9,

    /// The operation was aborted, typically due to a concurrency issue
    /// like sequencer check failures, transaction aborts, etc.
    ///
    /// See litmus test above for deciding between FAILED_PRECONDITION,
    /// ABORTED, and UNAVAILABLE.
    value Aborted = 10,

    /// Operation tried to iterate past the valid input range. E.g., seeking or
    /// reading past end of file.
    ///
    /// Unlike INVALID_ARGUMENT, this error indicates a problem that may
    /// be fixed if the system state changes. For example, a 32-bit file
    /// system will generate INVALID_ARGUMENT if asked to read at an
    /// offset that is not in the range [0,2<sup>32</sup>-1], but it will generate
    /// OUT_OF_RANGE if asked to read from an offset past the current
    /// file size.
    ///
    /// There is a fair bit of overlap between FAILED_PRECONDITION and
    /// OUT_OF_RANGE. We recommend using OUT_OF_RANGE (the more specific
    /// error) when it applies so that callers who are iterating through
    /// a space can easily look for an OUT_OF_RANGE error to detect when
    /// they are done.
    value OutOfRange = 11,

    /// Operation is not implemented or not supported/enabled in this service.
    value Unimplemented = 12,

    /// Internal errors. Means some invariants expected by underlying
    /// system has been broken. If you see one of these errors,
    /// something is very broken.
    value Internal = 13,

    /// The service is currently unavailable. This is a most likely a
    /// transient condition and may be corrected by retrying with
    /// a backoff.
    ///
    /// See litmus test above for deciding between FAILED_PRECONDITION,
    /// ABORTED, and UNAVAILABLE.
    value Unavailable = 14,

    /// Unrecoverable data loss or corruption.
    value DataLoss = 15,

    /// The request does not have valid authentication credentials for the
    /// operation.
    value Unauthenticated = 16,
});
////////////////////////
// Mirrors TF_DataType from the TensorFlow C API; the numeric values must
// stay in sync with the C headers.
c_enum!("Type of a single tensor element.", TF_DataType, DataType {
    /// 32-bit floating point.
    value Float = 1,

    /// 64-bit floating point.
    value Double = 2,

    /// 32-bit signed integer.
    value Int32 = 3,

    /// 8-bit unsigned integer.
    value UInt8 = 4,

    /// 16-bit signed integer.
    value Int16 = 5,

    /// 8-bit signed integer.
    value Int8 = 6,

    /// String.
    value String = 7,

    /// Complex number composed of two 32-bit floats.
    value Complex64 = 8,

    /// 64-bit signed integer.
    value Int64 = 9,

    /// Boolean.
    value Bool = 10,

    /// Quantized 8-bit signed integer.
    value QInt8 = 11,

    /// Quantized 8-bit unsigned integer.
    value QUInt8 = 12,

    /// Quantized 32-bit signed integer.
    value QInt32 = 13,

    /// Float32 truncated to 16 bits. Only for cast ops.
    value BFloat16 = 14,

    /// Quantized 16-bit signed integer.
    value QInt16 = 15,

    /// Quantized 16-bit unsigned integer.
    value QUInt16 = 16,

    /// 16-bit unsigned integer.
    value UInt16 = 17,

    /// Complex number composed of two 64-bit floats.
    value Complex128 = 18,

    /// 16-bit floating point.
    value Half = 19,

    /// TensorFlow Resource (name, container, device,...)
    value Resource = 20,

    /// A dynamic type similar to std::any::Any.
    value Variant = 21,

    /// 32-bit unsigned integer.
    value UInt32 = 22,

    /// 64-bit unsigned integer.
    value UInt64 = 23,
});
impl Default for DataType {
    fn default() -> DataType {
        // 32-bit float is used as the default element type.
        DataType::Float
    }
}
////////////////////////
/// Holds error information when communicating with back and forth with `tensorflow`.
///
/// It either has an `Code::Ok` code, or otherwise an error code with an associated message.
pub struct Status {
    // Owned TF_Status; allocated by TF_NewStatus and freed by
    // TF_DeleteStatus (see the impl_new!/impl_drop! invocations below).
    inner: *mut tf::TF_Status,
}
// Status::new() and Drop are generated from the tensorflow-sys
// allocation / deletion functions.
impl_new!(
    Status,
    TF_NewStatus,
    "Creates a status with `Code::Ok` and no message."
);
impl_drop!(Status, TF_DeleteStatus);
impl Status {
    /// Creates a status and sets its code and message.
    ///
    /// Returns an error if `msg` contains an embedded 0 byte.
    pub fn new_set(code: Code, msg: &str) -> std::result::Result<Status, NulError> {
        let mut status = Status::new();
        status.set(code, msg)?;
        Ok(status)
    }

    /// Returns the status's code.
    pub fn code(&self) -> Code {
        // The cast matches the c_uint-based mapping generated by c_enum!;
        // unknown codes become Code::UnrecognizedEnumValue.
        unsafe { Code::from_int(tf::TF_GetCode(self.inner) as u32) }
    }

    /// Returns true if the status's code is `Code::Ok`.
    pub fn is_ok(&self) -> bool {
        self.code() == Code::Ok
    }

    /// Turns the current `Status` into a `Result`.
    ///
    /// `Code::Ok` maps to `Ok(())`; anything else is returned as the error.
    fn into_result(self) -> Result<()> {
        if self.is_ok() {
            Ok(())
        } else {
            Err(self)
        }
    }

    /// Sets the code and message.
    ///
    /// Returns an error if `msg` contains an embedded 0 byte.
    pub fn set(&mut self, code: Code, msg: &str) -> std::result::Result<(), NulError> {
        let message = CString::new(msg)?;
        unsafe {
            tf::TF_SetStatus(self.inner, code.to_c(), message.as_ptr());
        }
        Ok(())
    }

    /// Returns a mutable pointer to the inner tensorflow Status `TF_Status`.
    fn inner(&mut self) -> *mut tf::TF_Status {
        self.inner
    }
}
impl Display for Status {
    /// Formats as `<code>: <message>`, e.g. `InvalidArgument: bad shape`.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "{}: ", self.code())?;
        // Substitute a placeholder rather than failing if the C-side
        // message is not valid UTF-8.
        let msg = unsafe {
            match CStr::from_ptr(tf::TF_Message(self.inner)).to_str() {
                Ok(s) => s,
                Err(_) => "<invalid UTF-8 in message>",
            }
        };
        f.write_str(msg)
    }
}
impl Debug for Status {
    /// Like Display, but also includes the raw TF_Status pointer, wrapped
    /// in braces.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "{{inner:{:?}, ", self.inner)?;
        write!(f, "{}: ", self.code())?;
        // Same UTF-8 fallback as the Display impl.
        let msg = unsafe {
            match CStr::from_ptr(tf::TF_Message(self.inner)).to_str() {
                Ok(s) => s,
                Err(_) => "<invalid UTF-8 in message>",
            }
        };
        f.write_str(msg)?;
        write!(f, "}}")?;
        Ok(())
    }
}
impl From<NulError> for Status {
    // An embedded NUL cannot be represented in a C string, so report it as
    // an invalid argument.
    fn from(_e: NulError) -> Self {
        invalid_arg!("String contained NUL byte")
    }
}

impl From<Utf8Error> for Status {
    // C-side bytes that are not valid UTF-8 cannot become a Rust string.
    fn from(_e: Utf8Error) -> Self {
        invalid_arg!("String contained invalid UTF-8")
    }
}
impl From<IntoStringError> for Status {
fn from(e: IntoStringError) -> Self {
invalid_arg!(
"Error converting C string to Rust string: {}",
e.description()
)
}
}
impl Error for Status {
    // NOTE(review): `Error::description` is deprecated in favor of Display;
    // kept as-is because removing it would change what `description()`
    // returns for existing callers.
    fn description(&self) -> &str {
        unsafe {
            // Same UTF-8 fallback as the Display impl.
            match CStr::from_ptr(tf::TF_Message(self.inner)).to_str() {
                Ok(s) => s,
                Err(_) => "<invalid UTF-8 in message>",
            }
        }
    }

    // No underlying cause is tracked for a Status.
    fn cause(&self) -> Option<&dyn Error> {
        None
    }
}
////////////////////////
/// Options that can be passed during session creation.
#[derive(Debug)]
pub struct SessionOptions {
    // Owned TF_SessionOptions; created and destroyed by the
    // impl_new!/impl_drop! invocations below.
    inner: *mut tf::TF_SessionOptions,
}
impl SessionOptions {
    /// Set the target.
    ///
    /// `target` can be empty, a single entry, or a comma separated list of entries.
    /// Each entry is in one of the following formats :
    ///
    /// - "local"
    /// - ip:port
    /// - host:port
    ///
    /// Returns an error if `target` contains an embedded 0 byte.
    pub fn set_target(&mut self, target: &str) -> std::result::Result<(), NulError> {
        let cstr = CString::new(target)?;
        unsafe {
            tf::TF_SetTarget(self.inner, cstr.as_ptr());
        }
        Ok(())
    }

    /// Set the config.
    ///
    /// `config` should be a serialized [`ConfigProto` proto](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/protobuf/config.proto).
    /// Returns an error if config was not parsed successfully as a `ConfigProto`.
    pub fn set_config(&mut self, config: &[u8]) -> Result<()> {
        let mut status = Status::new();
        unsafe {
            tf::TF_SetConfig(
                self.inner,
                config.as_ptr() as *const _,
                config.len(),
                status.inner(),
            );
        }
        // Use the shared conversion instead of re-deriving Ok/Err inline;
        // this is what Status::into_result exists for.
        status.into_result()
    }
}
// SessionOptions::new() and Drop are generated from the tensorflow-sys
// allocation / deletion functions.
impl_new!(
    SessionOptions,
    TF_NewSessionOptions,
    "Creates a blank set of options."
);
impl_drop!(SessionOptions, TF_DeleteSessionOptions);
////////////////////////
/// Convenience type for `Result` with `Status` as the error type.
///
/// Most fallible functions in this crate return this type.
pub type Result<T> = std::result::Result<T, Status>;
////////////////////////
/// A Rust type that maps to a `DataType`.
///
/// Currently, all implementors must *not* implement Drop (or transitively contain
/// anything that does) and must be bit-for-bit compatible with the corresponding C
/// type. Clients must not implement this trait.
///
/// This trait doesn't require `num::Zero` or `num::One` because some tensor
/// types (such as `bool` and `String`) don't implement them and we need to
/// supply custom implementations.
pub trait TensorType: Default + Clone + Display + Debug + 'static {
    /// Tensor representation for this type. Normally `TensorDataCRepr` for types
    /// that have the same representation in Rust; or `TensorDataNoCRepr` for
    /// types where the Rust and C representations differ.
    #[doc(hidden)]
    type InnerType: TensorInner<Self>;

    /// Returns the DataType that corresponds to this type.
    fn data_type() -> DataType;

    /// Returns the zero value.
    fn zero() -> Self;

    /// Returns the one value.
    fn one() -> Self;

    /// Return true if the data has the same representation in C and Rust and
    /// can be written/read directly.
    fn is_repr_c() -> bool;

    /// Unpacks data from C. Returns an error if `is_repr_c()` is true for this
    /// type or some other error occurred.
    fn unpack(data: &[u8], count: usize) -> Result<Vec<Self>>;

    /// Returns the number of bytes in the packed representation. If
    /// `is_repr_c()` returns true, this will return 0.
    fn packed_size(data: &[Self]) -> usize;

    /// Packs data for sending to C. Returns an error if `is_repr_c()` returns
    /// true for this type or some other error occurred. The size of the buffer
    /// must be at least as large as the value returned by `packed_size(data)`.
    fn pack(data: &[Self], buffer: &mut [u8]) -> Result<()>;
}
// Implements TensorType for a type whose Rust representation is identical
// to the C representation, so pack/unpack are unnecessary and report
// Code::Unimplemented if called.
macro_rules! tensor_type {
    ($rust_type:ty, $tensor_type:ident, $zero:expr, $one:expr) => {
        impl TensorType for $rust_type {
            // Data can be read/written through the TF_Tensor buffer directly.
            type InnerType = TensorDataCRepr<$rust_type>;

            fn data_type() -> DataType {
                DataType::$tensor_type
            }

            fn zero() -> Self {
                $zero
            }

            fn one() -> Self {
                $one
            }

            fn is_repr_c() -> bool {
                true
            }

            fn unpack(_data: &[u8], _count: usize) -> Result<Vec<Self>> {
                Err(Status::new_set(
                    Code::Unimplemented,
                    concat!("Unpacking is not necessary for ", stringify!($rust_type)),
                )
                .unwrap())
            }

            fn packed_size(_data: &[Self]) -> usize {
                0
            }

            fn pack(_data: &[Self], _buffer: &mut [u8]) -> Result<()> {
                Err(Status::new_set(
                    Code::Unimplemented,
                    concat!("Packing is not necessary for ", stringify!($rust_type)),
                )
                .unwrap())
            }
        }
    };
}
// TensorType impls for primitive element types that share their in-memory
// representation with C.
tensor_type!(f16, Half, half::consts::ZERO, half::consts::ONE);
tensor_type!(f32, Float, 0.0, 1.0);
tensor_type!(f64, Double, 0.0, 1.0);
tensor_type!(i32, Int32, 0, 1);
tensor_type!(u8, UInt8, 0, 1);
tensor_type!(u16, UInt16, 0, 1);
tensor_type!(u32, UInt32, 0, 1);
tensor_type!(u64, UInt64, 0, 1);
tensor_type!(i16, Int16, 0, 1);
tensor_type!(i8, Int8, 0, 1);
tensor_type!(
    Complex<f32>,
    Complex64,
    Complex::new(0.0, 0.0),
    Complex::new(1.0, 0.0)
);
tensor_type!(
    Complex<f64>,
    Complex128,
    Complex::new(0.0, 0.0),
    Complex::new(1.0, 0.0)
);
tensor_type!(i64, Int64, 0, 1);
tensor_type!(bool, Bool, false, true);
// Declares a quantized newtype wrapper around a primitive integer and wires
// it up as a TensorType via tensor_type!.
macro_rules! q_type {
    ($rust_type:ident, $(#[$attr:meta])* type $q_type:ident) => {
        $(#[$attr])*
        #[derive(Clone,Copy,Default,Debug,Eq,PartialEq,Ord,PartialOrd)]
        pub struct $q_type($rust_type);

        impl Display for $q_type {
            // Displays as the underlying integer.
            fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
                <$rust_type as Display>::fmt(&self.0, f)
            }
        }

        impl From<$rust_type> for $q_type {
            fn from(x: $rust_type) -> Self {
                $q_type(x)
            }
        }

        tensor_type!($q_type, $q_type, $q_type(0), $q_type(1));
    }
}
// Quantized integer element types.
q_type!(i8,
    /// Quantized type for i8.
    type QInt8);
q_type!(u8,
    /// Quantized type for u8.
    type QUInt8);
q_type!(i16,
    /// Quantized type for i16.
    type QInt16);
q_type!(u16,
    /// Quantized type for u16.
    type QUInt16);
q_type!(i32,
    /// Quantized type for i32.
    type QInt32);
////////////////////////
/// BFloat16 provides a Rust type for BFloat16.
///
/// This is "float32 truncated to 16 bits" (see `DataType::BFloat16`), not
/// the IEEE half type (`Half`/`f16`).
#[derive(Debug, Clone, Copy, Default)]
pub struct BFloat16(u16);
impl Display for BFloat16 {
    /// Formats by widening to `f32` first; delegating to `Display::fmt`
    /// preserves any width/precision flags supplied by the caller.
    fn fmt(&self, f: &mut Formatter<'_>) -> ::std::fmt::Result {
        let val: f32 = (*self).into();
        Display::fmt(&val, f)
    }
}
impl Into<f32> for BFloat16 {
fn into(self) -> f32 {
unsafe {
// Assumes that the architecture uses IEEE-754 natively for floats
// and twos-complement for integers.
mem::transmute::<u32, f32>((self.0 as u32) << 16)
}
}
}
impl From<f32> for BFloat16 {
    /// Truncates an `f32` to bfloat16 by keeping the high 16 bits of its
    /// IEEE-754 representation (sign, exponent, top mantissa bits).
    fn from(value: f32) -> Self {
        // f32::to_bits replaces the previous unsafe transmute with the
        // equivalent safe, stable API.
        BFloat16((value.to_bits() >> 16) as u16)
    }
}
impl PartialEq for BFloat16 {
    /// Compares by widening both operands to `f32`, so floating-point
    /// equality semantics apply.
    fn eq(&self, other: &BFloat16) -> bool {
        let lhs: f32 = (*self).into();
        let rhs: f32 = (*other).into();
        lhs == rhs
    }
}
impl PartialOrd for BFloat16 {
    /// Orders by widening both operands to `f32`; yields `None` when the
    /// `f32` comparison does (floating-point semantics).
    fn partial_cmp(&self, other: &BFloat16) -> Option<Ordering> {
        let lhs: f32 = (*self).into();
        let rhs: f32 = (*other).into();
        lhs.partial_cmp(&rhs)
    }
}
// TensorType impl for BFloat16; the wrapper shares its u16 representation
// with C, so TensorDataCRepr applies.
tensor_type!(
    BFloat16,
    BFloat16,
    BFloat16::from(0.0f32),
    BFloat16::from(1.0f32)
);
////////////////////////
impl TensorType for String {
    type InnerType = TensorDataNoCRepr<String>;

    fn data_type() -> DataType {
        DataType::String
    }

    fn zero() -> Self {
        "".to_string()
    }

    fn one() -> Self {
        "\u{0001}".to_string()
    }

    // Strings use TensorFlow's variable-length string encoding, so the
    // Rust and C representations differ.
    fn is_repr_c() -> bool {
        false
    }

    /// Decodes `count` strings from the C layout: a table of `count` u64
    /// offsets followed by TF_StringEncode'd data.
    fn unpack(data: &[u8], count: usize) -> Result<Vec<Self>> {
        let offsets = unsafe { slice::from_raw_parts(data.as_ptr() as *const u64, count) };
        let mut out = Vec::with_capacity(count);
        let mut status = Status::new();
        // Offsets are relative to the end of the offset table.
        let base_offset = mem::size_of::<u64>() * count;
        for offset in offsets {
            let off = *offset as usize + base_offset;
            #[allow(trivial_casts)]
            let src = &data[off] as *const u8 as *const c_char;
            let src_len = data.len() - off;
            let mut dst_len: usize = 0;
            let mut dst: *const c_char = ptr::null();
            unsafe {
                tf::TF_StringDecode(src, src_len, &mut dst, &mut dst_len, status.inner());
            }
            if !status.is_ok() {
                return Err(status);
            }
            // The decoded bytes must be valid UTF-8 to become a Rust String.
            let string_data = unsafe { slice::from_raw_parts(dst as *const u8, dst_len) };
            out.push(std::str::from_utf8(string_data)?.to_string());
        }
        Ok(out)
    }

    fn packed_size(data: &[Self]) -> usize {
        // Offset table plus the encoded size of each string.
        let string_data: usize = data
            .iter()
            .map(|s| unsafe { tf::TF_StringEncodedSize(s.len()) })
            .sum();
        mem::size_of::<u64>() * data.len() + string_data
    }

    /// Encodes all strings into `buffer` using the layout described in
    /// `unpack`: offset table first, then each encoded string.
    fn pack(data: &[Self], buffer: &mut [u8]) -> Result<()> {
        let offsets: &mut [u64] =
            unsafe { slice::from_raw_parts_mut(buffer.as_mut_ptr() as *mut u64, data.len()) };
        let base_offset = mem::size_of::<u64>() * data.len();
        let mut offset = base_offset;
        let mut status = Status::new();
        for i in 0..data.len() {
            offsets[i] = (offset - base_offset) as u64;
            let src = data[i].as_ptr() as *const c_char;
            let src_len = data[i].len();
            let dst: *mut u8 = &mut buffer[offset];
            let dst_len = buffer.len() - offset;
            // TF_StringEncode returns the number of bytes written.
            offset += unsafe {
                tf::TF_StringEncode(src, src_len, dst as *mut c_char, dst_len, status.inner())
            };
            if !status.is_ok() {
                return Err(status);
            }
        }
        Ok(())
    }
}
////////////////////////
// Object-safe accessor for the underlying TF_Tensor, independent of the
// element type.
trait AnyTensor: Debug {
    // Returns a pointer to the C tensor, packing data first if necessary.
    fn inner(&self) -> Result<*mut tf::TF_Tensor>;
}
////////////////////////
unsafe fn tensor_dims(tensor: *mut tf::TF_Tensor) -> Vec<u64> {
let mut dims = Vec::with_capacity(tf::TF_NumDims(tensor) as usize);
for i in 0..dims.capacity() {
dims.push(tf::TF_Dim(tensor, i as c_int) as u64);
}
dims
}
/// Inner representation of `Tensor`s.
#[doc(hidden)]
pub trait TensorInner<T>: Debug + Clone
where
    Self: Sized + Deref<Target = [T]> + DerefMut<Target = [T]>,
{
    /// Return the inner representation of a tensor with the given
    /// dimensions.
    fn new_inner(dims: &[u64]) -> Self;

    /// Wraps a TF_Tensor. Returns None if types don't match.
    unsafe fn from_tf_tensor(tensor: *mut tf::TF_Tensor) -> Option<Self>;

    /// Return a mutable pointer to the C tensor.
    // NOTE(review): `&Vec<u64>` would idiomatically be `&[u64]`, but the
    // signature is kept to avoid changing both implementations at once.
    fn as_mut_ptr(&self, dims: &Vec<u64>) -> Result<*mut tf::TF_Tensor>;
}
////////////////////////
/// Inner representation for `Tensor`s of types where C and Rust have the
/// same representation.
#[derive(Debug)]
#[doc(hidden)]
pub struct TensorDataCRepr<T>
where
    T: TensorType,
{
    // Owned TF_Tensor, freed on drop; allocated eagerly in
    // new_inner/from_tf_tensor (non-null is asserted in as_mut_ptr).
    inner: *mut tf::TF_Tensor,
    /// Equal to the product of the tensor's dimensions.
    data_count: usize,
    phantom: PhantomData<T>,
}

// SAFETY(review): assumes the TF_Tensor buffer can be accessed from any
// thread as long as Rust's aliasing rules are upheld by the owning
// wrapper — confirm against the TensorFlow C API's thread-safety notes.
unsafe impl<T> Send for TensorDataCRepr<T> where T: TensorType {}
unsafe impl<T> Sync for TensorDataCRepr<T> where T: TensorType {}
impl<T: TensorType> Drop for TensorDataCRepr<T> {
fn drop(&mut self) {
if !self.inner.is_null() {
unsafe {
tf::TF_DeleteTensor(self.inner);
}
}
self.inner = ptr::null_mut();
}
}
impl<T> TensorInner<T> for TensorDataCRepr<T>
where
    T: Debug + TensorType + Copy,
{
    /// Allocates a TF_Tensor large enough for the product of `dims`
    /// elements and zero-fills it.
    fn new_inner(dims: &[u64]) -> Self {
        let total = product(dims) as usize;
        unsafe {
            let inner = tf::TF_AllocateTensor(
                T::data_type().to_c(),
                dims.as_ptr() as *const _,
                dims.len() as c_int,
                total * mem::size_of::<T>(),
            );
            // Zero-initialize allocated memory.
            let data = tf::TF_TensorData(inner);
            let byte_size = tf::TF_TensorByteSize(inner);
            libc::memset(data as *mut libc::c_void, 0, byte_size);
            TensorDataCRepr {
                inner,
                data_count: total,
                phantom: PhantomData,
            }
        }
    }

    // Wraps a TF_Tensor. Returns None if types don't match.
    // On success this takes ownership: the tensor is freed by Drop.
    unsafe fn from_tf_tensor(tensor: *mut tf::TF_Tensor) -> Option<Self> {
        if DataType::from_c(tf::TF_TensorType(tensor)) != T::data_type() {
            return None;
        }
        Some(TensorDataCRepr {
            inner: tensor,
            data_count: product(&tensor_dims(tensor)) as usize,
            phantom: PhantomData,
        })
    }

    // `_dims` is unused because the TF_Tensor is allocated eagerly in
    // new_inner / from_tf_tensor and never needs to be rebuilt.
    fn as_mut_ptr(&self, _dims: &Vec<u64>) -> Result<*mut tf::TF_Tensor> {
        assert!(!self.inner.is_null());
        Ok(self.inner)
    }
}
impl<T: TensorType> Deref for TensorDataCRepr<T> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &[T] {
        // Views the TF_Tensor buffer directly; valid because T has the same
        // representation in C and Rust for this inner type.
        let data = unsafe { tf::TF_TensorData(self.inner) } as *mut T;
        unsafe { slice::from_raw_parts(data, self.data_count) }
    }
}

impl<T: TensorType> DerefMut for TensorDataCRepr<T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [T] {
        // Same direct view as Deref, but mutable.
        let data = unsafe { tf::TF_TensorData(self.inner) } as *mut T;
        unsafe { slice::from_raw_parts_mut(data, self.data_count) }
    }
}
impl<T: TensorType + Copy> Clone for TensorDataCRepr<T> {
    fn clone(&self) -> Self {
        // Allocate a fresh TF_Tensor with the same dtype and shape...
        let (inner, total) = unsafe {
            let dims = tensor_dims(self.inner);
            let total = product(&dims) as usize;
            let inner = tf::TF_AllocateTensor(
                T::data_type().to_c(),
                dims.as_ptr() as *const _,
                dims.len() as c_int,
                total * mem::size_of::<T>(),
            );
            (inner, total)
        };
        let mut clone = TensorDataCRepr {
            inner,
            data_count: total,
            phantom: PhantomData,
        };
        // ...then copy the elements over (T: Copy, so memcpy semantics).
        clone.deref_mut().copy_from_slice(self.deref());
        clone
    }
}
////////////////////////
/// Inner representation for `Tensor`s of types where C and Rust have
/// different representations.
#[derive(Debug)]
#[doc(hidden)]
pub struct TensorDataNoCRepr<T>
where
    T: TensorType,
{
    /// Wrapped TF_Tensor.  May be null when constructed via `new_inner`;
    /// in that case it is created lazily in `as_mut_ptr`.
    inner: Cell<*mut tf::TF_Tensor>,
    /// Points to either the TF_Tensor data or the contents of `unpacked_data`.
    data: Cell<*mut T>,
    /// Equal to the product of the tensor's dimensions.
    data_count: usize,
    /// True once `data` points at the decoded Rust-side buffer.
    unpacked: Cell<bool>,
    /// This is just an easy way to handle deallocation correctly. According to
    /// the aliasing rules, we shouldn't touch this data because it can be
    /// modified through `data`.
    unpacked_data: RefCell<Option<Vec<T>>>,
}
impl<T> TensorInner<T> for TensorDataNoCRepr<T>
where
    T: Debug + TensorType,
{
    /// Creates a new tensor.
    ///
    /// The data is initialized to zeros.
    fn new_inner(dims: &[u64]) -> Self {
        let total = product(dims) as usize;
        let mut data = Vec::with_capacity(total);
        data.resize(total, T::zero());
        // No TF_Tensor is allocated yet; it is created lazily in as_mut_ptr
        // when the packed C representation is first needed.
        TensorDataNoCRepr {
            inner: Cell::new(ptr::null_mut()),
            data: Cell::new(data.as_mut_ptr()),
            data_count: total,
            unpacked: Cell::new(true),
            unpacked_data: RefCell::new(Some(data)),
        }
    }

    /// Wraps a TF_Tensor. Returns None if types don't match.  Decoding into
    /// Rust values is deferred until the data is dereferenced (see unpack).
    unsafe fn from_tf_tensor(tensor: *mut tf::TF_Tensor) -> Option<Self> {
        if DataType::from_c(tf::TF_TensorType(tensor)) != T::data_type() {
            return None;
        }
        Some(TensorDataNoCRepr {
            inner: Cell::new(tensor),
            data: Cell::new(tf::TF_TensorData(tensor) as *mut _),
            data_count: product(&tensor_dims(tensor)) as usize,
            unpacked: Cell::new(false),
            unpacked_data: RefCell::new(None),
        })
    }

    /// Returns the packed TF_Tensor, building and caching it on first use.
    fn as_mut_ptr(&self, dims: &Vec<u64>) -> Result<*mut tf::TF_Tensor> {
        let mut inner = self.inner.get();
        if inner.is_null() {
            let data: &[T] = self;
            let packed_size = T::packed_size(data);
            inner = unsafe {
                let inner = tf::TF_AllocateTensor(
                    T::data_type().to_c(),
                    dims.as_ptr() as *const _,
                    dims.len() as c_int,
                    packed_size,
                );
                let buf =
                    slice::from_raw_parts_mut(tf::TF_TensorData(inner) as *mut u8, packed_size);
                // NOTE(review): if pack fails, the `?` returns before the
                // tensor is stored, leaking the allocation above.
                T::pack(data, buf)?;
                inner
            };
            self.inner.set(inner);
        }
        Ok(inner)
    }
}
impl<T: TensorType> Drop for TensorDataNoCRepr<T> {
    fn drop(&mut self) {
        // Frees the wrapped TF_Tensor (if any); the unpacked Vec, when
        // present, is dropped normally via the RefCell field.
        self.drop_tensor();
    }
}
impl<T> TensorDataNoCRepr<T>
where
    T: TensorType,
{
    // Decodes the wrapped TF_Tensor's bytes into a Rust-side Vec on first
    // access and points `data` at it.
    //
    // This will panic if `unpacked` is false and `unpacked_data` is already
    // borrowed.
    #[allow(trivial_numeric_casts)]
    fn unpack(&self) {
        if !self.unpacked.get() {
            let mut data = self.unpacked_data.borrow_mut();
            let tensor = self.inner.get();
            let bytes = unsafe {
                slice::from_raw_parts(
                    tf::TF_TensorData(tensor) as *const u8,
                    tf::TF_TensorByteSize(tensor) as usize,
                )
            };
            // The unwrap() may panic (e.g. if a string contains a 0 byte),
            // but there's nothing we can do. This function is always
            // called from contexts that don't allow us to return an error.
            let mut unpacked = T::unpack(bytes, self.data_count).unwrap();
            assert_eq!(unpacked.len(), self.data_count);
            self.data.set(unpacked.as_mut_ptr());
            *data = Some(unpacked);
            self.unpacked.set(true);
        }
    }

    // Frees the wrapped TF_Tensor (if any) and nulls the cell.
    fn drop_tensor(&self) {
        let inner = self.inner.get();
        if !inner.is_null() {
            unsafe {
                tf::TF_DeleteTensor(inner);
            }
        }
        self.inner.set(ptr::null_mut());
    }
}
impl<T: TensorType> Deref for TensorDataNoCRepr<T> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &[T] {
        // Ensure the Rust-side buffer exists before exposing it.
        self.unpack();
        unsafe { slice::from_raw_parts(self.data.get(), self.data_count) }
    }
}

impl<T: TensorType> DerefMut for TensorDataNoCRepr<T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [T] {
        self.unpack();
        // If the slice is modified, the tensor is stale.
        self.drop_tensor();
        unsafe { slice::from_raw_parts_mut(self.data.get(), self.data_count) }
    }
}
impl<T: TensorType> Clone for TensorDataNoCRepr<T> {
    /// Deep-copies the unpacked element data.
    ///
    /// The previous implementation derived the new shape by calling
    /// `tensor_dims` on `self.inner`, but `inner` is null for values built
    /// via `new_inner` that were never packed, making such clones undefined
    /// behavior.  `new_inner` only uses the element count (the product of
    /// the dimensions), so a flat 1-D shape of `data_count` elements is
    /// equivalent and always safe.
    fn clone(&self) -> Self {
        let mut clone = TensorDataNoCRepr::new_inner(&[self.data_count as u64]);
        clone.deref_mut().clone_from_slice(self.deref());
        clone
    }
}
/// Holds a multi-dimensional array of elements of a single data type.
///
/// The data buffer stores elements in row major order. E.g. if data is treated
/// as a vector of `T`:
///
/// ```text
///   element 0:   index (0, ..., 0)
///   element 1:   index (0, ..., 1)
///   ...
/// ```
#[derive(Debug, Clone, Eq)]
pub struct Tensor<T: TensorType> {
    // Element storage; representation depends on T::InnerType.
    inner: T::InnerType,
    // Tensor shape; the element count is the product of these dimensions.
    dims: Vec<u64>,
}
/// Multiplies all values together; an empty slice yields 1.
#[inline]
fn product(values: &[u64]) -> u64 {
    values.iter().fold(1, |acc, &value| acc * value)
}
impl<T: TensorType> Tensor<T> {
/// Creates a new tensor.
///
/// The data is initialized to zeros.
pub fn new(dims: &[u64]) -> Self {
Tensor {
inner: T::InnerType::new_inner(dims),
dims: Vec::from(dims),
}
}
/// Sets (copies) the tensor values to the provided ones.
///
/// ```
/// # use tensorflow::Tensor;
/// let a = Tensor::new(&[2, 2]).with_values(&[0_i32, 1, 2, 3]).unwrap();
/// ```
pub fn with_values(mut self, value: &[T]) -> Result<Self> {
if self.len() != value.len() {
return Err(invalid_arg!(
"length of values array ({}) is not equal to tensor total elements ({})",
value.len(),
self.len()
));
}
for (e, v) in self.iter_mut().zip(value) {
e.clone_from(v);
}
Ok(self)
}
/// Returns the tensor's dimensions.
pub fn dims(&self) -> &[u64] {
&self.dims
}
// Wraps a TF_Tensor. Returns None if types don't match.
unsafe fn from_tf_tensor(tensor: *mut tf::TF_Tensor) -> Option<Self> {
let mut dims = Vec::with_capacity(tf::TF_NumDims(tensor) as usize);
for i in 0..dims.capacity() {
dims.push(tf::TF_Dim(tensor, i as c_int) as u64);
}
Some(Tensor {
inner: T::InnerType::from_tf_tensor(tensor)?,
dims: dims,
})
}
}
impl<T: TensorType> AnyTensor for Tensor<T> {
    fn inner(&self) -> Result<*mut tf::TF_Tensor> {
        // Delegates to the inner representation, which may need to pack
        // the data into C form first.
        self.inner.as_mut_ptr(&self.dims)
    }
}
impl<T: TensorType> Deref for Tensor<T> {
    type Target = [T];

    // Exposes the elements as a flat row-major slice.
    #[inline]
    fn deref(&self) -> &[T] {
        self.inner.deref()
    }
}

impl<T: TensorType> DerefMut for Tensor<T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [T] {
        self.inner.deref_mut()
    }
}
impl<T: TensorType> From<T> for Tensor<T> {
    /// Creates a scalar (rank-0) tensor holding `value`.
    fn from(value: T) -> Self {
        let mut tensor = Tensor::new(&[]);
        // A rank-0 tensor still has exactly one element.
        tensor[0] = value;
        tensor
    }
}
impl<'a, T: TensorType> From<&'a [T]> for Tensor<T> {
    /// Builds a rank-1 tensor whose elements are cloned from `value`.
    fn from(value: &'a [T]) -> Self {
        let mut tensor: Tensor<T> = Tensor::new(&[value.len() as u64]);
        for (dst, src) in tensor.iter_mut().zip(value) {
            dst.clone_from(src);
        }
        tensor
    }
}
impl<T: TensorType + PartialEq> PartialEq for Tensor<T> {
    /// Two tensors are equal when both their shapes and their elements
    /// match.
    fn eq(&self, other: &Tensor<T>) -> bool {
        if self.dims != other.dims {
            return false;
        }
        self.deref() == other.deref()
    }
}
/// Recursively writes `values`, interpreted as a row-major tensor of shape
/// `shape`, to `f` as nested bracketed lists (e.g. `[[1, 2], [3, 4]]`).
///
/// A scalar (empty shape) is written without brackets and must have exactly
/// one value.
fn write_tensor_recursive<T: Display>(
    f: &mut Formatter<'_>,
    shape: &[u64],
    values: &[T],
) -> ::std::fmt::Result {
    if shape.is_empty() {
        // Handle special case of a scalar tensor.
        write!(f, "{}", values[0])
    } else {
        // Recur with values split into chunks of the next dims size,
        // Surround with brackets and separate with comma.
        write!(f, "[")?;
        if shape[0] > 0 {
            // Index-based chunking (rather than slice::chunks) so that a
            // zero-sized inner dimension yields empty sub-slices instead of
            // panicking on a zero chunk size.
            let chunk_size = values.len() / shape[0] as usize;
            for i in 0..shape[0] as usize {
                if i != 0 {
                    write!(f, ", ")?;
                }
                write_tensor_recursive(
                    f,
                    &shape[1..],
                    &values[i * chunk_size..(i + 1) * chunk_size],
                )?;
            }
        }
        write!(f, "]")
    }
}
impl<T: TensorType> Display for Tensor<T> {
    /// Formats as nested bracketed lists, e.g. `[[1, 2], [3, 4]]`.
    fn fmt(&self, f: &mut Formatter<'_>) -> ::std::fmt::Result {
        write_tensor_recursive(f, &self.dims, self)
    }
}
////////////////////////
/// Dynamically loaded plugins.
/// The C API doesn't provide a way to unload libraries, so nothing happens when this
/// goes out of scope.
#[allow(missing_copy_implementations)]
#[derive(Debug)]
pub struct Library {
    // Raw TF_Library handle; never freed (see note above).
    inner: *mut tf::TF_Library,
}
impl Library {
    /// Loads a library.
    ///
    /// Returns the status reported by TensorFlow if loading fails, or an
    /// `InvalidArgument` status if `library_filename` contains a NUL byte.
    pub fn load(library_filename: &str) -> Result<Self> {
        let c_filename = CString::new(library_filename)?;
        let mut status = Status::new();
        let inner = unsafe { tf::TF_LoadLibrary(c_filename.as_ptr(), status.inner()) };
        if inner.is_null() {
            Err(status)
        } else {
            Ok(Library { inner })
        }
    }

    // TODO: Implement TF_GetOpList once we can deserialize protos.
}
////////////////////////
/// Returns a string describing version information of the
/// `TensorFlow` library. `TensorFlow` is using semantic versioning.
///
/// Returns an error if the version string is not valid UTF-8.
pub fn version() -> std::result::Result<String, Utf8Error> {
    let c_version = unsafe { CStr::from_ptr(tf::TF_Version()) };
    Ok(c_version.to_str()?.to_string())
}
/// Returns a serialized KernelList protocol buffer containing KernelDefs for
/// all registered kernels.
pub fn get_all_registered_kernels() -> Result<Vec<u8>> {
    let mut status = Status::new();
    let buf = unsafe {
        let buf = tf::TF_GetAllRegisteredKernels(status.inner());
        if !status.is_ok() {
            return Err(status);
        }
        // NOTE(review): `true` presumably transfers ownership of the C
        // buffer to `Buffer` — confirm against buffer.rs.
        Buffer::<u8>::from_c(buf, true)
    };
    // Copy the bytes out so the C buffer can be released with `buf`.
    Ok(Vec::from(buf.as_ref()))
}
/// Returns a serialized KernelList protocol buffer containing KernelDefs for
/// all kernels registered for the operation named `name`.
pub fn get_registered_kernels_for_op(name: &str) -> Result<Vec<u8>> {
    let c_name = CString::new(name)?;
    let mut status = Status::new();
    unsafe {
        let c_buf = tf::TF_GetRegisteredKernelsForOp(c_name.as_ptr(), status.inner());
        // Propagate any error before taking ownership of the buffer.
        status.into_result()?;
        let owned = Buffer::<u8>::from_c(c_buf, true);
        Ok(owned.as_ref().to_vec())
    }
}
////////////////////////
/// A Shape is the shape of a tensor. A Shape may be an unknown rank, or it may
/// have a known rank with each dimension being known or unknown.
///
/// `None` means unknown rank; `Some(v)` holds one entry per dimension, where
/// each entry is `Some(size)` or `None` for an unknown dimension.
#[derive(Debug, Eq, Ord, PartialEq, PartialOrd, Hash, Clone, Default)]
pub struct Shape(Option<Vec<Option<i64>>>);
impl Shape {
/// Creates a new Shape.
pub fn new(s: Option<Vec<Option<i64>>>) -> Shape {
Shape(s)
}
/// Returns the number of dimensions if known, or None if unknown.
pub fn dims(&self) -> Option<usize> {
match *self {
Shape(None) => None,
Shape(Some(ref v)) => Some(v.len()),
}
}
}
impl From<Option<Vec<Option<i64>>>> for Shape {
    /// Wraps the raw dimension description in a `Shape` without copying.
    fn from(data: Option<Vec<Option<i64>>>) -> Shape {
        Shape(data)
    }
}
impl Into<Option<Vec<Option<i64>>>> for Shape {
fn into(self) -> Option<Vec<Option<i64>>> {
self.0
}
}
static UNKNOWN_DIMENSION: Option<i64> = None;
impl Index<usize> for Shape {
type Output = Option<i64>;
fn index(&self, index: usize) -> &Option<i64> {
match self.0 {
None => &UNKNOWN_DIMENSION,
Some(ref v) => {
if index < v.len() {
&v[index]
} else {
&UNKNOWN_DIMENSION
}
}
}
}
}
impl Display for Shape {
    /// Formats the shape by delegating to the inner `Option`'s formatting.
    ///
    /// NOTE(review): `Option` implements `Debug` but not `Display`, so method
    /// resolution picks `Debug::fmt` here — output looks like
    /// `Some([Some(2), None])`.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}
////////////////////////
mod while_loop;
pub use crate::while_loop::*;
////////////////////////
#[cfg(test)]
mod tests {
    use super::*;
    // Helper: builds an empty graph and a session attached to it, panicking
    // (with the status message) if session creation fails.
    fn create_session() -> (Session, Graph) {
        let graph = Graph::new();
        let options = SessionOptions::new();
        match Session::new(&options, &graph) {
            Ok(session) => (session, graph),
            Err(status) => panic!("Creating session failed with status: {}", status),
        }
    }
    #[test]
    fn smoke() {
        create_session();
    }
    #[test]
    fn test_close() {
        let (mut session, _) = create_session();
        let status = session.close();
        assert!(status.is_ok());
    }
    // A 2x3 tensor should have 6 elements and be index-assignable.
    #[test]
    fn test_tensor() {
        let mut tensor = <Tensor<f32>>::new(&[2, 3]);
        assert_eq!(tensor.len(), 6);
        tensor[0] = 1.0;
    }
    // Newly allocated tensors of C-representable types must be zero-filled.
    #[test]
    fn test_tensor_native_type_zero() {
        let tensor = <Tensor<i32>>::new(&[1000]);
        // Checking against null-initialized slice/vector makes
        // the unit test succeed often on repeated runs.
        for v in tensor.as_ref() {
            assert_eq!(0, *v);
        }
    }
    #[test]
    fn test_set_target() {
        let mut options = SessionOptions::new();
        options.set_target("local").unwrap();
    }
    #[test]
    fn test_set_config() {
        let mut options = SessionOptions::new();
        // An empty array is a valid proto, since all fields are optional.
        options.set_config(&vec![]).unwrap();
    }
    // End-to-end: import a serialized GraphDef, feed x, fetch y = 2 * x.
    #[test]
    fn test_run() {
        // Graph is just y = 2 * x
        let graph_proto = vec![
            0x0a, 0x2a, 0x0a, 0x01, 0x78, 0x12, 0x0b, 0x50, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f,
            0x6c, 0x64, 0x65, 0x72, 0x2a, 0x0b, 0x0a, 0x05, 0x64, 0x74, 0x79, 0x70, 0x65, 0x12,
            0x02, 0x30, 0x01, 0x2a, 0x0b, 0x0a, 0x05, 0x73, 0x68, 0x61, 0x70, 0x65, 0x12, 0x02,
            0x3a, 0x00, 0x0a, 0x30, 0x0a, 0x03, 0x79, 0x2f, 0x79, 0x12, 0x05, 0x43, 0x6f, 0x6e,
            0x73, 0x74, 0x2a, 0x0b, 0x0a, 0x05, 0x64, 0x74, 0x79, 0x70, 0x65, 0x12, 0x02, 0x30,
            0x01, 0x2a, 0x15, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x0c, 0x42, 0x0a,
            0x08, 0x01, 0x12, 0x00, 0x2a, 0x04, 0x00, 0x00, 0x00, 0x40, 0x0a, 0x19, 0x0a, 0x01,
            0x79, 0x12, 0x03, 0x4d, 0x75, 0x6c, 0x1a, 0x01, 0x78, 0x1a, 0x03, 0x79, 0x2f, 0x79,
            0x2a, 0x07, 0x0a, 0x01, 0x54, 0x12, 0x02, 0x30, 0x01,
        ];
        let (session, mut graph) = create_session();
        let opts = ImportGraphDefOptions::new();
        let status = graph.import_graph_def(&graph_proto, &opts);
        assert!(status.is_ok());
        let mut x = <Tensor<f32>>::new(&[2]);
        x[0] = 2.0;
        x[1] = 3.0;
        let mut step = SessionRunArgs::new();
        let x_op = graph.operation_by_name_required("x").unwrap();
        step.add_feed(&x_op, 0, &x);
        let y_op = graph.operation_by_name_required("y").unwrap();
        let output_ix = step.request_fetch(&y_op, 0);
        session.run(&mut step).unwrap();
        let output_tensor = step.fetch::<f32>(output_ix).unwrap();
        assert_eq!(output_tensor.len(), 2);
        assert_eq!(output_tensor[0], 4.0);
        assert_eq!(output_tensor[1], 6.0);
    }
    // Round-trips f32 values through BFloat16 and checks ordering/equality.
    // The sample values are exactly representable in bfloat16, so the
    // conversion back to f32 must be lossless here.
    #[test]
    fn test_bfloat16() {
        let data = [-1.0f32, 0.0, 1.0, 2.5];
        for i in 0..data.len() {
            let x = data[i];
            let bfx = BFloat16::from(x);
            assert_eq!(<BFloat16 as Into<f32>>::into(bfx), x);
            assert_eq!(bfx.partial_cmp(&bfx), Some(Ordering::Equal));
            assert!(bfx.eq(&bfx));
            for j in 0..i {
                let y = data[j];
                let bfy = BFloat16::from(y);
                assert_eq!(bfx.partial_cmp(&bfy), Some(Ordering::Greater));
                assert_eq!(bfy.partial_cmp(&bfx), Some(Ordering::Less));
                assert!(!bfx.eq(&bfy));
            }
        }
        assert_eq!(<BFloat16 as Into<f32>>::into(BFloat16::default()), 0.0f32);
        assert_eq!(BFloat16::from(1.5f32).to_string(), "1.5");
    }
    #[test]
    fn test_f16() {
        let data: Vec<f16> = vec![-1.0f32, 0.0, 1.0, 2.5]
            .into_iter()
            .map(|x| f16::from_f32(x))
            .collect();
        let tensor = <Tensor<f16>>::new(&[2, 2]).with_values(&data).unwrap();
        assert_eq!(&tensor[..], &data[..]);
    }
    // Runs a string tensor through EncodeBase64 to exercise the non-C-repr
    // (packed string) tensor path.
    #[test]
    fn test_strings() {
        let mut g = Graph::new();
        let x_op = {
            let mut nd = g.new_operation("Placeholder", "x").unwrap();
            nd.set_attr_type("dtype", DataType::String).unwrap();
            nd.set_attr_shape("shape", &Shape(Some(vec![]))).unwrap();
            nd.finish().unwrap()
        };
        let y_op = {
            let mut nd = g.new_operation("EncodeBase64", "y").unwrap();
            nd.add_input(x_op.clone());
            nd.finish().unwrap()
        };
        let options = SessionOptions::new();
        let session = Session::new(&options, &g).unwrap();
        let mut x = <Tensor<String>>::new(&[2]);
        x[0] = "foo".to_string();
        x[1] = "bar".to_string();
        let mut step = SessionRunArgs::new();
        step.add_feed(&x_op, 0, &x);
        let output_ix = step.request_fetch(&y_op, 0);
        session.run(&mut step).unwrap();
        let output_tensor = step.fetch::<String>(output_ix).unwrap();
        assert_eq!(output_tensor.len(), 2);
        assert_eq!(output_tensor[0], "Zm9v");
        assert_eq!(output_tensor[1], "YmFy");
    }
    #[test]
    fn tensor_clone() {
        let x = Tensor::<i32>::new(&[3]).with_values(&[1, 2, 3]).unwrap();
        let clone = x.clone();
        assert_eq!(x, clone);
    }
    // Equality must consider both values and shape (a and d differ only in shape).
    #[test]
    fn tensor_eq() {
        let a = Tensor::<i32>::new(&[3]).with_values(&[1, 2, 3]).unwrap();
        let b = Tensor::<i32>::from(&[1, 2, 3][..]);
        let c = Tensor::<i32>::new(&[3]).with_values(&[1, 2, 4]).unwrap();
        let d = Tensor::<i32>::new(&[3, 1]).with_values(&[1, 2, 3]).unwrap();
        assert_eq!(a, b);
        assert_ne!(a, c);
        assert_ne!(a, d);
    }
    // Table-driven test for Display: (expected string, shape, values),
    // including scalars, empty dimensions, and nested shapes.
    #[test]
    fn tensor_display() {
        let tests = [
            ("1", &[][..], &[1][..]),
            ("[1]", &[1], &[1]),
            ("[1, 2]", &[2], &[1, 2]),
            ("[[1, 2], [3, 4]]", &[2, 2], &[1, 2, 3, 4]),
            ("[[[1], [2]], [[3], [4]]]", &[2, 2, 1], &[1, 2, 3, 4]),
            ("[[[1, 2]], [[3, 4]]]", &[2, 1, 2], &[1, 2, 3, 4]),
            ("[[[[], []]], [[[], []]]]", &[2, 1, 2, 0], &[]),
            ("[[], []]", &[2, 0], &[]),
            ("[[], []]", &[2, 0, 2], &[]),
            ("[]", &[0], &[]),
            ("[]", &[0, 0], &[]),
        ];
        for &(expected, shape, values) in tests.iter() {
            let tensor = Tensor::<i32>::new(shape).with_values(values).unwrap();
            assert_eq!(expected, format!("{}", tensor));
        }
    }
    #[test]
    fn test_get_all_registered_kernels() {
        assert!(get_all_registered_kernels().unwrap().len() > 0);
    }
    #[test]
    fn test_get_registered_kernels_for_op() {
        assert!(get_registered_kernels_for_op("Add").unwrap().len() > 0);
    }
}
// Add Tensor::shape
//! This crate provides Rust bindings for the
//! [`TensorFlow`](https://www.tensorflow.org) machine learning library.
//!
//! If you aren't sure how to use something, please see the
//! [examples](https://github.com/tensorflow/rust/tree/master/examples) folder.
#![warn(
missing_copy_implementations,
missing_debug_implementations,
missing_docs,
trivial_casts,
trivial_numeric_casts,
unused_extern_crates,
unused_import_braces,
unused_qualifications
)]
use half::f16;
use libc::{c_int, c_uint};
use num_complex::Complex;
use std::cell::Cell;
use std::cell::RefCell;
use std::cmp::Ordering;
use std::error::Error;
use std::ffi::CStr;
use std::ffi::CString;
use std::ffi::IntoStringError;
use std::ffi::NulError;
use std::fmt;
use std::fmt::Debug;
use std::fmt::Display;
use std::fmt::Formatter;
use std::marker::PhantomData;
use std::mem;
use std::ops::Deref;
use std::ops::DerefMut;
use std::ops::Drop;
use std::ops::Index;
use std::os::raw::c_char;
use std::ptr;
use std::slice;
use std::str::Utf8Error;
use tensorflow_sys as tf;
////////////////////////
/// Will panic if `msg` contains an embedded 0 byte.
macro_rules! invalid_arg {
($fmt:expr) => {
crate::Status::new_set(crate::Code::InvalidArgument, $fmt).unwrap()
};
($fmt:expr, $($arg:tt)*) => ({
let msg = format!($fmt, $($arg)*);
crate::Status::new_set(crate::Code::InvalidArgument, &msg).unwrap()
});
}
////////////////////////
// Generates a `new()` constructor for a wrapper struct whose `inner` field is
// produced by the given `tf::` allocation function. Panics (via assert) if the
// C API returns null.
macro_rules! impl_new {
    ($name: ident, $call:ident, $doc:expr) => {
        impl $name {
            #[doc = $doc]
            pub fn new() -> Self {
                unsafe {
                    let inner = tf::$call();
                    assert!(!inner.is_null());
                    $name { inner: inner }
                }
            }
        }
    };
}
////////////////////////
// Generates a `Drop` impl that releases the wrapped C object through the
// given `tf::` deletion function.
macro_rules! impl_drop {
    ($name: ident, $call:ident) => {
        impl Drop for $name {
            fn drop(&mut self) {
                unsafe {
                    tf::$call(self.inner);
                }
            }
        }
    };
}
////////////////////////
// Declares a Rust enum mirroring a C enum from the TensorFlow API, with
// lossless round-tripping: unknown C values are preserved in the
// `UnrecognizedEnumValue` variant. Also generates `from_int`/`to_int`,
// `to_c`/`from_c` conversions and a `Display` impl.
macro_rules! c_enum {
    ($c_name:ident, $(#[$enum_attr:meta])* $enum_name:ident { $( $(#[$attr:meta])*
        $name:ident = $num:expr),* }) => {
        $(#[$enum_attr])*
        #[derive(PartialEq,Eq,PartialOrd,Ord,Debug,Copy,Clone)]
        pub enum $enum_name {
            /// Represents an unrecognized value.
            ///
            /// This allows such values to come from the C API and be sent back to the
            /// C API without loss in case new values are added in the future.
            UnrecognizedEnumValue(c_uint),
            $($(#[$attr])* $name),*
        }
        impl $enum_name {
            #[allow(dead_code)]
            fn from_int(value: c_uint) -> $enum_name {
                match value {
                    $($num => $enum_name::$name,)*
                    c => $enum_name::UnrecognizedEnumValue(c),
                }
            }
            #[allow(dead_code)]
            fn to_int(&self) -> c_uint {
                match self {
                    &$enum_name::UnrecognizedEnumValue(c) => c,
                    $(&$enum_name::$name => $num),*
                }
            }
            // Converts to the C enum type by transmuting the integer value.
            #[allow(dead_code)]
            fn to_c(&self) -> tf::$c_name {
                unsafe {
                    ::std::mem::transmute(self.to_int())
                }
            }
            #[allow(dead_code)]
            fn from_c(value: tf::$c_name) -> $enum_name {
                $enum_name::from_int(value as c_uint)
            }
        }
        impl ::std::fmt::Display for $enum_name {
            fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
                match self {
                    $(&$enum_name::$name => f.write_str(stringify!($name)),)*
                    &$enum_name::UnrecognizedEnumValue(c) => write!(f, "UnrecognizedEnumValue({})", c),
                }
            }
        }
    };
    // Variant of the main pattern accepting a trailing comma.
    ($c_name:ident, $(#[$enum_attr:meta])* $enum_name:ident { $( $(#[$attr:meta])*
        $name:ident = $num:expr,)* }) => {
        c_enum!($c_name, $(#[$enum_attr])* $enum_name { $( $(#[$attr])* $name = $num),* });
    };
    // Deprecated pattern.
    ($doc:expr, $c_name:ident, $(#[$enum_attr:meta])* $enum_name:ident { $( $(#[$attr:meta])* value
        $name:ident = $num:expr),* }) => {
        c_enum!($c_name, #[doc = $doc] $(#[$enum_attr])*
            $enum_name { $( $(#[$attr])* $name = $num),* });
    };
    // Deprecated pattern.
    ($doc:expr, $c_name:ident, $(#[$enum_attr:meta])* $enum_name:ident { $( $(#[$attr:meta])* value
        $name:ident = $num:expr,)* }) => {
        c_enum!($c_name, #[doc = $doc] $(#[$enum_attr])*
            $enum_name { $( $(#[$attr])* $name = $num),* });
    }
}
////////////////////////
mod buffer;
use crate::buffer::Buffer;
mod graph;
pub use crate::graph::*;
#[cfg(feature = "experimental_training")]
mod scope;
#[cfg(feature = "experimental_training")]
pub use crate::scope::*;
mod session;
pub use crate::session::*;
pub mod expr;
pub mod io;
#[cfg(feature = "experimental_training")]
pub mod ops;
////////////////////////
// Mirrors TF_Code from the TensorFlow C API; numeric values must stay in sync
// with the C header.
c_enum!("Error values that can be returned.", TF_Code, Code {
    /// Not an error; returned on success.
    value Ok = 0,
    /// The operation was cancelled (typically by the caller).
    value Cancelled = 1,
    /// Unknown error. An example of where this error may be returned is
    /// if a Status value received from another address space belongs to
    /// an error-space that is not known in this address space. Also
    /// errors raised by APIs that do not return enough error information
    /// may be converted to this error.
    value Unknown = 2,
    /// Client specified an invalid argument. Note that this differs
    /// from FAILED_PRECONDITION. INVALID_ARGUMENT indicates arguments
    /// that are problematic regardless of the state of the system
    /// (e.g., a malformed file name).
    value InvalidArgument = 3,
    /// Deadline expired before operation could complete. For operations
    /// that change the state of the system, this error may be returned
    /// even if the operation has completed successfully. For example, a
    /// successful response from a server could have been delayed long
    /// enough for the deadline to expire.
    value DeadlineExceeded = 4,
    /// Some requested entity (e.g., file or directory) was not found.
    /// For privacy reasons, this code *may* be returned when the client
    /// does not have the access right to the entity.
    value NotFound = 5,
    /// Some entity that we attempted to create (e.g., file or directory)
    /// already exists.
    value AlreadyExists = 6,
    /// The caller does not have permission to execute the specified
    /// operation. PERMISSION_DENIED must not be used for rejections
    /// caused by exhausting some resource (use RESOURCE_EXHAUSTED
    /// instead for those errors). PERMISSION_DENIED must not be
    /// used if the caller can not be identified (use UNAUTHENTICATED
    /// instead for those errors).
    value PermissionDenied = 7,
    /// Some resource has been exhausted, perhaps a per-user quota, or
    /// perhaps the entire file system is out of space.
    value ResourceExhausted = 8,
    /// Operation was rejected because the system is not in a state
    /// required for the operation's execution. For example, directory
    /// to be deleted may be non-empty, an rmdir operation is applied to
    /// a non-directory, etc.
    ///
    /// A litmus test that may help a service implementor in deciding
    /// between FAILED_PRECONDITION, ABORTED, and UNAVAILABLE:
    /// (a) Use UNAVAILABLE if the client can retry just the failing call.
    /// (b) Use ABORTED if the client should retry at a higher-level
    /// (e.g., restarting a read-modify-write sequence).
    /// (c) Use FAILED_PRECONDITION if the client should not retry until
    /// the system state has been explicitly fixed. E.g., if an "rmdir"
    /// fails because the directory is non-empty, FAILED_PRECONDITION
    /// should be returned since the client should not retry unless
    /// they have first fixed up the directory by deleting files from it.
    /// (d) Use FAILED_PRECONDITION if the client performs conditional
    /// REST Get/Update/Delete on a resource and the resource on the
    /// server does not match the condition. E.g., conflicting
    /// read-modify-write on the same resource.
    value FailedPrecondition = 9,
    /// The operation was aborted, typically due to a concurrency issue
    /// like sequencer check failures, transaction aborts, etc.
    ///
    /// See litmus test above for deciding between FAILED_PRECONDITION,
    /// ABORTED, and UNAVAILABLE.
    value Aborted = 10,
    /// Operation tried to iterate past the valid input range. E.g., seeking or
    /// reading past end of file.
    ///
    /// Unlike INVALID_ARGUMENT, this error indicates a problem that may
    /// be fixed if the system state changes. For example, a 32-bit file
    /// system will generate INVALID_ARGUMENT if asked to read at an
    /// offset that is not in the range [0,2<sup>32</sup>-1], but it will generate
    /// OUT_OF_RANGE if asked to read from an offset past the current
    /// file size.
    ///
    /// There is a fair bit of overlap between FAILED_PRECONDITION and
    /// OUT_OF_RANGE. We recommend using OUT_OF_RANGE (the more specific
    /// error) when it applies so that callers who are iterating through
    /// a space can easily look for an OUT_OF_RANGE error to detect when
    /// they are done.
    value OutOfRange = 11,
    /// Operation is not implemented or not supported/enabled in this service.
    value Unimplemented = 12,
    /// Internal errors. Means some invariants expected by underlying
    /// system has been broken. If you see one of these errors,
    /// something is very broken.
    value Internal = 13,
    /// The service is currently unavailable. This is a most likely a
    /// transient condition and may be corrected by retrying with
    /// a backoff.
    ///
    /// See litmus test above for deciding between FAILED_PRECONDITION,
    /// ABORTED, and UNAVAILABLE.
    value Unavailable = 14,
    /// Unrecoverable data loss or corruption.
    value DataLoss = 15,
    /// The request does not have valid authentication credentials for the
    /// operation.
    value Unauthenticated = 16,
});
////////////////////////
// Mirrors TF_DataType from the TensorFlow C API; numeric values must stay in
// sync with the C header.
c_enum!("Type of a single tensor element.", TF_DataType, DataType {
    /// 32-bit floating point.
    value Float = 1,
    /// 64-bit floating point.
    value Double = 2,
    /// 32-bit signed integer.
    value Int32 = 3,
    /// 8-bit unsigned integer.
    value UInt8 = 4,
    /// 16-bit signed integer.
    value Int16 = 5,
    /// 8-bit signed integer.
    value Int8 = 6,
    /// String.
    value String = 7,
    /// Complex number composed of two 32-bit floats.
    value Complex64 = 8,
    /// 64-bit signed integer.
    value Int64 = 9,
    /// Boolean.
    value Bool = 10,
    /// Quantized 8-bit signed integer.
    value QInt8 = 11,
    /// Quantized 8-bit unsigned integer.
    value QUInt8 = 12,
    /// Quantized 32-bit signed integer.
    value QInt32 = 13,
    /// Float32 truncated to 16 bits. Only for cast ops.
    value BFloat16 = 14,
    /// Quantized 16-bit signed integer.
    value QInt16 = 15,
    /// Quantized 16-bit unsigned integer.
    value QUInt16 = 16,
    /// 16-bit unsigned integer.
    value UInt16 = 17,
    /// Complex number composed of two 64-bit floats.
    value Complex128 = 18,
    /// 16-bit floating point.
    value Half = 19,
    /// TensorFlow Resource (name, container, device,...)
    value Resource = 20,
    /// A dynamic type similar to std::any::Any.
    value Variant = 21,
    /// 32-bit unsigned integer.
    value UInt32 = 22,
    /// 64-bit unsigned integer.
    value UInt64 = 23,
});
impl Default for DataType {
    /// Defaults to `Float` (32-bit floating point).
    fn default() -> DataType {
        DataType::Float
    }
}
////////////////////////
/// Holds error information when communicating with back and forth with `tensorflow`.
///
/// It either has an `Code::Ok` code, or otherwise an error code with an associated message.
pub struct Status {
    // Owned pointer to the C TF_Status; freed by the impl_drop! below.
    inner: *mut tf::TF_Status,
}
// Constructor and destructor wrapping TF_NewStatus / TF_DeleteStatus.
impl_new!(
    Status,
    TF_NewStatus,
    "Creates a status with `Code::Ok` and no message."
);
impl_drop!(Status, TF_DeleteStatus);
impl Status {
    /// Creates a status and sets its code and message.
    ///
    /// Returns `NulError` if `msg` contains an embedded 0 byte.
    pub fn new_set(code: Code, msg: &str) -> std::result::Result<Status, NulError> {
        let mut status = Status::new();
        status.set(code, msg)?;
        Ok(status)
    }
    /// Returns the status's code.
    pub fn code(&self) -> Code {
        unsafe { Code::from_int(tf::TF_GetCode(self.inner) as u32) }
    }
    /// Returns true if the status's code is `Code::Ok`.
    pub fn is_ok(&self) -> bool {
        self.code() == Code::Ok
    }
    /// Turns the current `Status` into a `Result`.
    ///
    /// `Ok` statuses become `Ok(())`; anything else becomes `Err(self)`.
    fn into_result(self) -> Result<()> {
        if self.is_ok() {
            Ok(())
        } else {
            Err(self)
        }
    }
    /// Sets the code and message.
    ///
    /// Returns `NulError` if `msg` contains an embedded 0 byte.
    pub fn set(&mut self, code: Code, msg: &str) -> std::result::Result<(), NulError> {
        let message = CString::new(msg)?;
        unsafe {
            tf::TF_SetStatus(self.inner, code.to_c(), message.as_ptr());
        }
        Ok(())
    }
    /// Returns a mutable pointer to the inner tensorflow Status `TF_Status`.
    fn inner(&mut self) -> *mut tf::TF_Status {
        self.inner
    }
}
impl Display for Status {
    /// Formats as `<code>: <message>`, substituting a placeholder if the
    /// C-side message is not valid UTF-8.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "{}: ", self.code())?;
        let msg = unsafe {
            match CStr::from_ptr(tf::TF_Message(self.inner)).to_str() {
                Ok(s) => s,
                Err(_) => "<invalid UTF-8 in message>",
            }
        };
        f.write_str(msg)
    }
}
impl Debug for Status {
    /// Formats as `{inner:<ptr>, <code>: <message>}`, substituting a
    /// placeholder if the C-side message is not valid UTF-8.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let msg = unsafe {
            match CStr::from_ptr(tf::TF_Message(self.inner)).to_str() {
                Ok(s) => s,
                Err(_) => "<invalid UTF-8 in message>",
            }
        };
        write!(f, "{{inner:{:?}, {}: {}}}", self.inner, self.code(), msg)
    }
}
impl From<NulError> for Status {
    /// Converts an embedded-NUL error into an `InvalidArgument` status.
    fn from(_e: NulError) -> Self {
        invalid_arg!("String contained NUL byte")
    }
}
impl From<Utf8Error> for Status {
    /// Converts a UTF-8 decoding error into an `InvalidArgument` status.
    fn from(_e: Utf8Error) -> Self {
        invalid_arg!("String contained invalid UTF-8")
    }
}
impl From<IntoStringError> for Status {
fn from(e: IntoStringError) -> Self {
invalid_arg!(
"Error converting C string to Rust string: {}",
e.description()
)
}
}
impl Error for Status {
    // NOTE(review): `description` and `cause` are deprecated in favor of
    // `Display`/`source`; kept as-is here since callers may still invoke them
    // directly.
    fn description(&self) -> &str {
        unsafe {
            match CStr::from_ptr(tf::TF_Message(self.inner)).to_str() {
                Ok(s) => s,
                Err(_) => "<invalid UTF-8 in message>",
            }
        }
    }
    // A Status has no underlying cause to report.
    fn cause(&self) -> Option<&dyn Error> {
        None
    }
}
////////////////////////
/// Options that can be passed during session creation.
#[derive(Debug)]
pub struct SessionOptions {
    // Owned pointer to the C TF_SessionOptions; freed by the impl_drop! below.
    inner: *mut tf::TF_SessionOptions,
}
impl SessionOptions {
    /// Set the target.
    ///
    /// `target` can be empty, a single entry, or a comma separated list of entries.
    /// Each entry is in one of the following formats :
    ///
    /// - "local"
    /// - ip:port
    /// - host:port
    ///
    /// Returns `NulError` if `target` contains an embedded 0 byte.
    pub fn set_target(&mut self, target: &str) -> std::result::Result<(), NulError> {
        let cstr = CString::new(target)?;
        unsafe {
            tf::TF_SetTarget(self.inner, cstr.as_ptr());
        }
        Ok(())
    }
    /// Set the config.
    ///
    /// `config` should be a serialized [`ConfigProto` proto](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/protobuf/config.proto).
    /// Returns an error if config was not parsed successfully as a `ConfigProto`.
    pub fn set_config(&mut self, config: &[u8]) -> Result<()> {
        let mut status = Status::new();
        unsafe {
            tf::TF_SetConfig(
                self.inner,
                config.as_ptr() as *const _,
                config.len(),
                status.inner(),
            );
        }
        // Use the shared Status helper instead of duplicating the
        // is_ok/Err(status) branch, consistent with the rest of the file.
        status.into_result()
    }
}
// Constructor and destructor wrapping TF_NewSessionOptions /
// TF_DeleteSessionOptions.
impl_new!(
    SessionOptions,
    TF_NewSessionOptions,
    "Creates a blank set of options."
);
impl_drop!(SessionOptions, TF_DeleteSessionOptions);
////////////////////////
/// Convenience type for `Result` with `Status` as the error type.
///
/// Used as the return type of most fallible functions in this crate.
pub type Result<T> = std::result::Result<T, Status>;
////////////////////////
/// A Rust type that maps to a `DataType`.
///
/// Currently, all implementors must *not* implement Drop (or transitively contain
/// anything that does) and must be bit-for-bit compatible with the corresponding C
/// type. Clients must not implement this trait.
///
/// This trait doesn't require `num::Zero` or `num::One` because some tensor
/// types (such as `bool` and `String`) don't implement them and we need to
/// supply custom implementations.
pub trait TensorType: Default + Clone + Display + Debug + 'static {
    /// Tensor representation for this type. Normally `TensorDataCRepr` for types
    /// that have the same representation in Rust; or `TensorDataNoCRepr` for
    /// types where the Rust and C representations differ.
    #[doc(hidden)]
    type InnerType: TensorInner<Self>;
    /// Returns the DataType that corresponds to this type.
    fn data_type() -> DataType;
    /// Returns the zero value.
    fn zero() -> Self;
    /// Returns the one value.
    fn one() -> Self;
    /// Return true if the data has the same representation in C and Rust and
    /// can be written/read directly.
    fn is_repr_c() -> bool;
    /// Unpacks data from C. Returns an error if `is_repr_c()` is true for this
    /// type or some other error occurred.
    fn unpack(data: &[u8], count: usize) -> Result<Vec<Self>>;
    /// Returns the number of bytes in the packed representation. If
    /// `is_repr_c()` returns true, this will return 0.
    fn packed_size(data: &[Self]) -> usize;
    /// Packs data for sending to C. Returns an error if `is_repr_c()` returns
    /// true for this type or some other error occurred. The size of the buffer
    /// must be at least as large as the value returned by `packed_size(data)`.
    fn pack(data: &[Self], buffer: &mut [u8]) -> Result<()>;
}
// Implements TensorType for a type whose memory layout matches the C side
// (is_repr_c() == true), so pack/unpack are unnecessary and return
// Unimplemented errors if called.
macro_rules! tensor_type {
    ($rust_type:ty, $tensor_type:ident, $zero:expr, $one:expr) => {
        impl TensorType for $rust_type {
            type InnerType = TensorDataCRepr<$rust_type>;
            fn data_type() -> DataType {
                DataType::$tensor_type
            }
            fn zero() -> Self {
                $zero
            }
            fn one() -> Self {
                $one
            }
            fn is_repr_c() -> bool {
                true
            }
            fn unpack(_data: &[u8], _count: usize) -> Result<Vec<Self>> {
                Err(Status::new_set(
                    Code::Unimplemented,
                    concat!("Unpacking is not necessary for ", stringify!($rust_type)),
                )
                .unwrap())
            }
            fn packed_size(_data: &[Self]) -> usize {
                0
            }
            fn pack(_data: &[Self], _buffer: &mut [u8]) -> Result<()> {
                Err(Status::new_set(
                    Code::Unimplemented,
                    concat!("Packing is not necessary for ", stringify!($rust_type)),
                )
                .unwrap())
            }
        }
    };
}
// TensorType impls for all primitive numeric types with identical C/Rust
// layout, mapping each to its DataType variant.
tensor_type!(f16, Half, half::consts::ZERO, half::consts::ONE);
tensor_type!(f32, Float, 0.0, 1.0);
tensor_type!(f64, Double, 0.0, 1.0);
tensor_type!(i32, Int32, 0, 1);
tensor_type!(u8, UInt8, 0, 1);
tensor_type!(u16, UInt16, 0, 1);
tensor_type!(u32, UInt32, 0, 1);
tensor_type!(u64, UInt64, 0, 1);
tensor_type!(i16, Int16, 0, 1);
tensor_type!(i8, Int8, 0, 1);
tensor_type!(
    Complex<f32>,
    Complex64,
    Complex::new(0.0, 0.0),
    Complex::new(1.0, 0.0)
);
tensor_type!(
    Complex<f64>,
    Complex128,
    Complex::new(0.0, 0.0),
    Complex::new(1.0, 0.0)
);
tensor_type!(i64, Int64, 0, 1);
tensor_type!(bool, Bool, false, true);
// Declares a newtype wrapper for a quantized tensor element type, delegating
// Display to the underlying integer and wiring it into TensorType via
// tensor_type!.
macro_rules! q_type {
    ($rust_type:ident, $(#[$attr:meta])* type $q_type:ident) => {
        $(#[$attr])*
        #[derive(Clone,Copy,Default,Debug,Eq,PartialEq,Ord,PartialOrd)]
        pub struct $q_type($rust_type);
        impl Display for $q_type {
            fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
                <$rust_type as Display>::fmt(&self.0, f)
            }
        }
        impl From<$rust_type> for $q_type {
            fn from(x: $rust_type) -> Self {
                $q_type(x)
            }
        }
        tensor_type!($q_type, $q_type, $q_type(0), $q_type(1));
    }
}
// Quantized element types corresponding to DataType::QInt8 etc.
q_type!(i8,
    /// Quantized type for i8.
    type QInt8);
q_type!(u8,
    /// Quantized type for u8.
    type QUInt8);
q_type!(i16,
    /// Quantized type for i16.
    type QInt16);
q_type!(u16,
    /// Quantized type for u16.
    type QUInt16);
q_type!(i32,
    /// Quantized type for i32.
    type QInt32);
////////////////////////
/// BFloat16 provides a Rust type for BFloat16.
///
/// Stores the upper 16 bits of an IEEE-754 f32 bit pattern (sign, exponent,
/// and truncated mantissa).
#[derive(Debug, Clone, Copy, Default)]
pub struct BFloat16(u16);
impl Display for BFloat16 {
    /// Formats by widening to f32 first, so formatter flags pass through.
    fn fmt(&self, f: &mut Formatter<'_>) -> ::std::fmt::Result {
        let val: f32 = (*self).into();
        Display::fmt(&val, f)
    }
}
impl Into<f32> for BFloat16 {
fn into(self) -> f32 {
unsafe {
// Assumes that the architecture uses IEEE-754 natively for floats
// and twos-complement for integers.
mem::transmute::<u32, f32>((self.0 as u32) << 16)
}
}
}
/// Truncates an `f32` to `BFloat16` by keeping only the upper 16 bits of its
/// IEEE-754 bit pattern (mantissa is truncated, not rounded).
impl From<f32> for BFloat16 {
    fn from(value: f32) -> Self {
        // f32::to_bits is the safe, stable equivalent of transmuting the
        // IEEE-754 bit pattern.
        BFloat16((value.to_bits() >> 16) as u16)
    }
}
impl PartialEq for BFloat16 {
    /// Compares by widening both sides to f32, inheriting f32 semantics
    /// (e.g. NaN != NaN), which is why `Eq` is not derived.
    fn eq(&self, other: &BFloat16) -> bool {
        let x: f32 = (*self).into();
        let y: f32 = (*other).into();
        x.eq(&y)
    }
}
impl PartialOrd for BFloat16 {
    /// Orders by widening both sides to f32; returns `None` when either value
    /// is NaN, matching f32 semantics.
    fn partial_cmp(&self, other: &BFloat16) -> Option<Ordering> {
        let x: f32 = (*self).into();
        let y: f32 = (*other).into();
        x.partial_cmp(&y)
    }
}
// Wires BFloat16 into TensorType as a C-representable 16-bit type.
tensor_type!(
    BFloat16,
    BFloat16,
    BFloat16::from(0.0f32),
    BFloat16::from(1.0f32)
);
////////////////////////
/// String tensors use TensorFlow's packed wire format (a table of u64 offsets
/// followed by length-prefixed string data), so the Rust and C
/// representations differ and explicit pack/unpack are required.
impl TensorType for String {
    type InnerType = TensorDataNoCRepr<String>;
    fn data_type() -> DataType {
        DataType::String
    }
    fn zero() -> Self {
        "".to_string()
    }
    fn one() -> Self {
        "\u{0001}".to_string()
    }
    fn is_repr_c() -> bool {
        false
    }
    /// Decodes `count` strings from the packed C buffer.
    ///
    /// Layout: `count` u64 offsets, then the encoded string bytes; each
    /// offset is relative to the end of the offset table.
    fn unpack(data: &[u8], count: usize) -> Result<Vec<Self>> {
        // Reinterpret the leading bytes as the offset table.
        let offsets = unsafe { slice::from_raw_parts(data.as_ptr() as *const u64, count) };
        let mut out = Vec::with_capacity(count);
        let mut status = Status::new();
        let base_offset = mem::size_of::<u64>() * count;
        for offset in offsets {
            let off = *offset as usize + base_offset;
            #[allow(trivial_casts)]
            let src = &data[off] as *const u8 as *const c_char;
            let src_len = data.len() - off;
            let mut dst_len: usize = 0;
            let mut dst: *const c_char = ptr::null();
            // TF_StringDecode sets `dst` to point into the buffer and
            // `dst_len` to the decoded byte length.
            unsafe {
                tf::TF_StringDecode(src, src_len, &mut dst, &mut dst_len, status.inner());
            }
            if !status.is_ok() {
                return Err(status);
            }
            let string_data = unsafe { slice::from_raw_parts(dst as *const u8, dst_len) };
            out.push(std::str::from_utf8(string_data)?.to_string());
        }
        Ok(out)
    }
    /// Total packed size: the offset table plus each string's encoded size.
    fn packed_size(data: &[Self]) -> usize {
        let string_data: usize = data
            .iter()
            .map(|s| unsafe { tf::TF_StringEncodedSize(s.len()) })
            .sum();
        mem::size_of::<u64>() * data.len() + string_data
    }
    /// Encodes the strings into `buffer` in the packed wire format.
    /// The caller must size `buffer` to at least `packed_size(data)`.
    fn pack(data: &[Self], buffer: &mut [u8]) -> Result<()> {
        // The start of the buffer holds the u64 offset table.
        let offsets: &mut [u64] =
            unsafe { slice::from_raw_parts_mut(buffer.as_mut_ptr() as *mut u64, data.len()) };
        let base_offset = mem::size_of::<u64>() * data.len();
        let mut offset = base_offset;
        let mut status = Status::new();
        for i in 0..data.len() {
            offsets[i] = (offset - base_offset) as u64;
            let src = data[i].as_ptr() as *const c_char;
            let src_len = data[i].len();
            let dst: *mut u8 = &mut buffer[offset];
            let dst_len = buffer.len() - offset;
            // TF_StringEncode returns the number of bytes written.
            offset += unsafe {
                tf::TF_StringEncode(src, src_len, dst as *mut c_char, dst_len, status.inner())
            };
            if !status.is_ok() {
                return Err(status);
            }
        }
        Ok(())
    }
}
////////////////////////
// Type-erased view of a tensor: exposes the raw TF_Tensor pointer regardless
// of element type.
trait AnyTensor: Debug {
    fn inner(&self) -> Result<*mut tf::TF_Tensor>;
}
////////////////////////
unsafe fn tensor_dims(tensor: *mut tf::TF_Tensor) -> Vec<u64> {
let mut dims = Vec::with_capacity(tf::TF_NumDims(tensor) as usize);
for i in 0..dims.capacity() {
dims.push(tf::TF_Dim(tensor, i as c_int) as u64);
}
dims
}
/// Inner representation of `Tensor`s.
#[doc(hidden)]
pub trait TensorInner<T>: Debug + Clone
where
    Self: Sized + Deref<Target = [T]> + DerefMut<Target = [T]>,
{
    /// Return the inner representation of a tensor with the given
    /// dimensions.
    fn new_inner(dims: &[u64]) -> Self;
    /// Wraps a TF_Tensor. Returns None if types don't match.
    unsafe fn from_tf_tensor(tensor: *mut tf::TF_Tensor) -> Option<Self>;
    /// Return a mutable pointer to the C tensor.
    ///
    /// NOTE(review): `&Vec<u64>` would more idiomatically be `&[u64]`, but
    /// changing the trait signature would break existing implementors.
    fn as_mut_ptr(&self, dims: &Vec<u64>) -> Result<*mut tf::TF_Tensor>;
}
////////////////////////
/// Inner representation for `Tensor`s of types where C and Rust have the
/// same representation.
///
/// The element data lives entirely in the C-allocated TF_Tensor and is
/// accessed in place through Deref/DerefMut.
#[derive(Debug)]
#[doc(hidden)]
pub struct TensorDataCRepr<T>
where
    T: TensorType,
{
    inner: *mut tf::TF_Tensor,
    /// Equal to the product of the tensor's dimensions.
    data_count: usize,
    phantom: PhantomData<T>,
}
// SAFETY: the tensor owns its C buffer exclusively, and T: TensorType types
// are plain data, so transferring or sharing across threads is sound.
unsafe impl<T> Send for TensorDataCRepr<T> where T: TensorType {}
unsafe impl<T> Sync for TensorDataCRepr<T> where T: TensorType {}
impl<T: TensorType> Drop for TensorDataCRepr<T> {
    /// Releases the C tensor; nulls the pointer to guard against double-free.
    fn drop(&mut self) {
        if !self.inner.is_null() {
            unsafe {
                tf::TF_DeleteTensor(self.inner);
            }
        }
        self.inner = ptr::null_mut();
    }
}
impl<T> TensorInner<T> for TensorDataCRepr<T>
where
    T: Debug + TensorType + Copy,
{
    /// Allocates a C tensor sized for `dims` and zero-fills it.
    fn new_inner(dims: &[u64]) -> Self {
        let total = product(dims) as usize;
        unsafe {
            let inner = tf::TF_AllocateTensor(
                T::data_type().to_c(),
                dims.as_ptr() as *const _,
                dims.len() as c_int,
                total * mem::size_of::<T>(),
            );
            // Zero-initialize allocated memory.
            let data = tf::TF_TensorData(inner);
            let byte_size = tf::TF_TensorByteSize(inner);
            libc::memset(data as *mut libc::c_void, 0, byte_size);
            TensorDataCRepr {
                inner,
                data_count: total,
                phantom: PhantomData,
            }
        }
    }
    // Wraps a TF_Tensor. Returns None if types don't match.
    unsafe fn from_tf_tensor(tensor: *mut tf::TF_Tensor) -> Option<Self> {
        if DataType::from_c(tf::TF_TensorType(tensor)) != T::data_type() {
            return None;
        }
        Some(TensorDataCRepr {
            inner: tensor,
            data_count: product(&tensor_dims(tensor)) as usize,
            phantom: PhantomData,
        })
    }
    /// The C tensor always exists for C-repr data, so `dims` is unused and
    /// the stored pointer is returned directly.
    fn as_mut_ptr(&self, _dims: &Vec<u64>) -> Result<*mut tf::TF_Tensor> {
        assert!(!self.inner.is_null());
        Ok(self.inner)
    }
}
impl<T: TensorType> Deref for TensorDataCRepr<T> {
    type Target = [T];
    /// Views the C tensor's buffer as a slice of `data_count` elements.
    #[inline]
    fn deref(&self) -> &[T] {
        let data = unsafe { tf::TF_TensorData(self.inner) } as *mut T;
        unsafe { slice::from_raw_parts(data, self.data_count) }
    }
}
impl<T: TensorType> DerefMut for TensorDataCRepr<T> {
    /// Mutable view of the C tensor's buffer as a slice of `data_count` elements.
    #[inline]
    fn deref_mut(&mut self) -> &mut [T] {
        let data = unsafe { tf::TF_TensorData(self.inner) } as *mut T;
        unsafe { slice::from_raw_parts_mut(data, self.data_count) }
    }
}
impl<T: TensorType + Copy> Clone for TensorDataCRepr<T> {
    /// Deep-copies by allocating a fresh C tensor with the same dims and
    /// copying the element data into it.
    fn clone(&self) -> Self {
        let (inner, total) = unsafe {
            let dims = tensor_dims(self.inner);
            let total = product(&dims) as usize;
            let inner = tf::TF_AllocateTensor(
                T::data_type().to_c(),
                dims.as_ptr() as *const _,
                dims.len() as c_int,
                total * mem::size_of::<T>(),
            );
            (inner, total)
        };
        let mut clone = TensorDataCRepr {
            inner,
            data_count: total,
            phantom: PhantomData,
        };
        // Copy the payload via the slice views of both tensors.
        clone.deref_mut().copy_from_slice(self.deref());
        clone
    }
}
////////////////////////
/// Inner representation for `Tensor`s of types where C and Rust have
/// different representations.
#[derive(Debug)]
#[doc(hidden)]
pub struct TensorDataNoCRepr<T>
where
    T: TensorType,
{
    /// Lazily created C tensor; null until the packed form is materialized.
    inner: Cell<*mut tf::TF_Tensor>,
    /// Points to either the TF_Tensor data or the contents of `unpacked_data`.
    data: Cell<*mut T>,
    /// Equal to the product of the tensor's dimensions.
    data_count: usize,
    /// True once `data` points at the Rust-side values in `unpacked_data`.
    unpacked: Cell<bool>,
    /// This is just an easy way to handle deallocation correctly. According to
    /// the aliasing rules, we shouldn't touch this data because it can be
    /// modified through `data`.
    unpacked_data: RefCell<Option<Vec<T>>>,
}
impl<T> TensorInner<T> for TensorDataNoCRepr<T>
where
    T: Debug + TensorType,
{
    /// Creates a new tensor.
    ///
    /// The data is initialized to zeros.
    fn new_inner(dims: &[u64]) -> Self {
        let total = product(dims) as usize;
        let mut data = Vec::with_capacity(total);
        data.resize(total, T::zero());
        TensorDataNoCRepr {
            // No C tensor exists yet; it is created lazily in `as_mut_ptr`
            // when the packed representation is first needed.
            inner: Cell::new(ptr::null_mut()),
            data: Cell::new(data.as_mut_ptr()),
            data_count: total,
            unpacked: Cell::new(true),
            unpacked_data: RefCell::new(Some(data)),
        }
    }
    /// Wraps a TF_Tensor. Returns None if types don't match.
    unsafe fn from_tf_tensor(tensor: *mut tf::TF_Tensor) -> Option<Self> {
        if DataType::from_c(tf::TF_TensorType(tensor)) != T::data_type() {
            return None;
        }
        Some(TensorDataNoCRepr {
            inner: Cell::new(tensor),
            data: Cell::new(tf::TF_TensorData(tensor) as *mut _),
            data_count: product(&tensor_dims(tensor)) as usize,
            // The data still lives in the C tensor's packed form until it is
            // first dereferenced (see `unpack`).
            unpacked: Cell::new(false),
            unpacked_data: RefCell::new(None),
        })
    }
    /// Returns (lazily creating and caching) a C tensor containing the packed
    /// representation of the current data.
    fn as_mut_ptr(&self, dims: &Vec<u64>) -> Result<*mut tf::TF_Tensor> {
        let mut inner = self.inner.get();
        if inner.is_null() {
            let data: &[T] = self;
            let packed_size = T::packed_size(data);
            inner = unsafe {
                let inner = tf::TF_AllocateTensor(
                    T::data_type().to_c(),
                    dims.as_ptr() as *const _,
                    dims.len() as c_int,
                    packed_size,
                );
                let buf =
                    slice::from_raw_parts_mut(tf::TF_TensorData(inner) as *mut u8, packed_size);
                // If packing fails, free the freshly allocated tensor before
                // propagating the error; returning early with `?` here used
                // to leak it, since the pointer was never stored in `self`.
                if let Err(e) = T::pack(data, buf) {
                    tf::TF_DeleteTensor(inner);
                    return Err(e.into());
                }
                inner
            };
            self.inner.set(inner);
        }
        Ok(inner)
    }
}
impl<T: TensorType> Drop for TensorDataNoCRepr<T> {
    /// Frees the cached C tensor, if one was created.
    fn drop(&mut self) {
        self.drop_tensor();
    }
}
impl<T> TensorDataNoCRepr<T>
where
    T: TensorType,
{
    // Ensures `data` points at the Rust representation of the values,
    // converting from the C tensor's packed bytes on first use.
    // This will panic if `unpacked` is false and `unpacked_data` is already borrowed.
    #[allow(trivial_numeric_casts)]
    fn unpack(&self) {
        if !self.unpacked.get() {
            let mut data = self.unpacked_data.borrow_mut();
            let tensor = self.inner.get();
            let bytes = unsafe {
                slice::from_raw_parts(
                    tf::TF_TensorData(tensor) as *const u8,
                    tf::TF_TensorByteSize(tensor) as usize,
                )
            };
            // The unwrap() may panic (e.g. if a string contains a 0 byte),
            // but there's nothing we can do. This function is always
            // called from contexts that don't allow us to return an error.
            let mut unpacked = T::unpack(bytes, self.data_count).unwrap();
            assert_eq!(unpacked.len(), self.data_count);
            self.data.set(unpacked.as_mut_ptr());
            *data = Some(unpacked);
            self.unpacked.set(true);
        }
    }
    // Frees the C tensor (if any) and marks it absent; safe to call when
    // no C tensor exists.
    fn drop_tensor(&self) {
        let inner = self.inner.get();
        if !inner.is_null() {
            unsafe {
                tf::TF_DeleteTensor(inner);
            }
        }
        self.inner.set(ptr::null_mut());
    }
}
impl<T: TensorType> Deref for TensorDataNoCRepr<T> {
    type Target = [T];
    #[inline]
    fn deref(&self) -> &[T] {
        // Make sure `data` points at the Rust-side values first.
        self.unpack();
        unsafe { slice::from_raw_parts(self.data.get(), self.data_count) }
    }
}
impl<T: TensorType> DerefMut for TensorDataNoCRepr<T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [T] {
        self.unpack();
        // If the slice is modified, the tensor is stale.
        self.drop_tensor();
        unsafe { slice::from_raw_parts_mut(self.data.get(), self.data_count) }
    }
}
impl<T: TensorType> Clone for TensorDataNoCRepr<T> {
    fn clone(&self) -> Self {
        // NOTE(review): `self.inner` is null for tensors created via
        // `new_inner` (and after `deref_mut` invalidates the C tensor), in
        // which case `tensor_dims` reads through a null TF_Tensor pointer —
        // confirm `clone` is only reached with a live C tensor, or thread the
        // dimensions in from the owning `Tensor`.
        let dims = unsafe { tensor_dims(self.inner.get()) };
        let mut clone = TensorDataNoCRepr::new_inner(&dims);
        clone.deref_mut().clone_from_slice(self.deref());
        clone
    }
}
/// Holds a multi-dimensional array of elements of a single data type.
///
/// The data buffer stores elements in row major order. E.g. if data is treated
/// as a vector of `T`:
///
/// ```text
/// element 0: index (0, ..., 0)
/// element 1: index (0, ..., 1)
/// ...
/// ```
#[derive(Debug, Clone, Eq)]
pub struct Tensor<T: TensorType> {
    /// Type-dependent storage (C-compatible or Rust-side representation).
    inner: T::InnerType,
    /// Dimensions; the element count is their product.
    dims: Vec<u64>,
}
/// Number of elements implied by `values` when interpreted as tensor
/// dimensions; the empty list yields 1 (a scalar).
#[inline]
fn product(values: &[u64]) -> u64 {
    values.iter().fold(1, |acc, &dim| acc * dim)
}
impl<T: TensorType> Tensor<T> {
    /// Creates a new tensor.
    ///
    /// The data is initialized to zeros.
    pub fn new(dims: &[u64]) -> Self {
        Tensor {
            inner: T::InnerType::new_inner(dims),
            dims: Vec::from(dims),
        }
    }
    /// Sets (copies) the tensor values to the provided ones.
    ///
    /// ```
    /// # use tensorflow::Tensor;
    /// let a = Tensor::new(&[2, 2]).with_values(&[0_i32, 1, 2, 3]).unwrap();
    /// ```
    pub fn with_values(mut self, value: &[T]) -> Result<Self> {
        // The value count must match the product of the dimensions exactly.
        if self.len() != value.len() {
            return Err(invalid_arg!(
                "length of values array ({}) is not equal to tensor total elements ({})",
                value.len(),
                self.len()
            ));
        }
        for (e, v) in self.iter_mut().zip(value) {
            e.clone_from(v);
        }
        Ok(self)
    }
    /// Returns the tensor's dimensions.
    pub fn dims(&self) -> &[u64] {
        &self.dims
    }
    /// Returns the tensor's dimensions as a Shape.
    pub fn shape(&self) -> Shape {
        Shape(Some(self.dims.iter().map(|d| Some(*d as i64)).collect()))
    }
    // Wraps a TF_Tensor. Returns None if types don't match.
    unsafe fn from_tf_tensor(tensor: *mut tf::TF_Tensor) -> Option<Self> {
        // Iterate over the reported dimension count rather than
        // `dims.capacity()`: `Vec::with_capacity` only guarantees *at least*
        // the requested capacity, so looping over `capacity()` could read
        // dimensions past `TF_NumDims`.
        let num_dims = tf::TF_NumDims(tensor) as usize;
        let mut dims = Vec::with_capacity(num_dims);
        for i in 0..num_dims {
            dims.push(tf::TF_Dim(tensor, i as c_int) as u64);
        }
        Some(Tensor {
            inner: T::InnerType::from_tf_tensor(tensor)?,
            dims: dims,
        })
    }
}
impl<T: TensorType> AnyTensor for Tensor<T> {
    // Materializes (if necessary) and returns the underlying TF_Tensor.
    fn inner(&self) -> Result<*mut tf::TF_Tensor> {
        self.inner.as_mut_ptr(&self.dims)
    }
}
impl<T: TensorType> Deref for Tensor<T> {
    type Target = [T];
    /// Exposes the elements as a flat, row-major slice.
    #[inline]
    fn deref(&self) -> &[T] {
        self.inner.deref()
    }
}
impl<T: TensorType> DerefMut for Tensor<T> {
    /// Mutable flat view of the elements.
    #[inline]
    fn deref_mut(&mut self) -> &mut [T] {
        self.inner.deref_mut()
    }
}
impl<T: TensorType> From<T> for Tensor<T> {
    /// Creates a scalar (rank-0) tensor holding `value`.
    fn from(value: T) -> Self {
        let mut tensor = Tensor::new(&[]);
        tensor[0] = value;
        tensor
    }
}
impl<'a, T: TensorType> From<&'a [T]> for Tensor<T> {
    /// Creates a rank-1 tensor whose elements are copied from `value`.
    fn from(value: &'a [T]) -> Self {
        let mut tensor: Tensor<T> = Tensor::new(&[value.len() as u64]);
        for (e, v) in tensor.iter_mut().zip(value) {
            e.clone_from(v);
        }
        tensor
    }
}
impl<T: TensorType + PartialEq> PartialEq for Tensor<T> {
    /// Tensors are equal when both their dimensions and their elements match.
    fn eq(&self, other: &Tensor<T>) -> bool {
        self.dims == other.dims && self.deref() == other.deref()
    }
}
/// Recursively formats `values` (row-major order) according to `shape`,
/// writing nested, comma-separated `[...]` lists to `f`.
///
/// A scalar tensor (empty shape) prints its single value with no brackets.
fn write_tensor_recursive<T: Display>(
    f: &mut Formatter<'_>,
    shape: &[u64],
    values: &[T],
) -> ::std::fmt::Result {
    if shape.is_empty() {
        // Handle special case of a scalar tensor.
        write!(f, "{}", values[0])
    } else {
        // Recur with values split into chunks of the next dims size,
        // Surround with brackets and separate with comma.
        write!(f, "[")?;
        if shape[0] > 0 {
            // Each of the shape[0] sub-tensors occupies an equal slice.
            let chunk_size = values.len() / shape[0] as usize;
            for i in 0..shape[0] as usize {
                if i != 0 {
                    write!(f, ", ")?;
                }
                write_tensor_recursive(
                    f,
                    &shape[1..],
                    &values[i * chunk_size..(i + 1) * chunk_size],
                )?;
            }
        }
        write!(f, "]")
    }
}
impl<T: TensorType> Display for Tensor<T> {
    fn fmt(&self, f: &mut Formatter<'_>) -> ::std::fmt::Result {
        // `self` derefs to the flat element slice.
        write_tensor_recursive(f, &self.dims, self)
    }
}
////////////////////////
/// Dynamically loaded plugins.
/// The C API doesn't provide a way to unload libraries, so nothing happens when this
/// goes out of scope.
#[allow(missing_copy_implementations)]
#[derive(Debug)]
pub struct Library {
    /// Raw handle returned by `TF_LoadLibrary`.
    inner: *mut tf::TF_Library,
}
impl Library {
    /// Loads a library from the given file path.
    pub fn load(library_filename: &str) -> Result<Self> {
        let filename = CString::new(library_filename)?;
        let mut status = Status::new();
        let raw = unsafe { tf::TF_LoadLibrary(filename.as_ptr(), status.inner()) };
        if raw.is_null() {
            return Err(status);
        }
        Ok(Library { inner: raw })
    }
    // TODO: Implement TF_GetOpList once we can deserialize protos.
}
////////////////////////
/// Returns a string describing version information of the
/// `TensorFlow` library. `TensorFlow` is using semantic versioning.
///
/// Errors only if the version string is not valid UTF-8.
pub fn version() -> std::result::Result<String, Utf8Error> {
    unsafe {
        CStr::from_ptr(tf::TF_Version())
            .to_str()
            .map(|s| s.to_string())
    }
}
/// Returns a serialized KernelList protocol buffer containing KernelDefs for
/// all registered kernels.
pub fn get_all_registered_kernels() -> Result<Vec<u8>> {
    let mut status = Status::new();
    let buf = unsafe {
        let buf = tf::TF_GetAllRegisteredKernels(status.inner());
        if !status.is_ok() {
            return Err(status);
        }
        // NOTE(review): the `true` flag appears to hand ownership of the C
        // buffer to `Buffer` so it is freed on drop — confirm in
        // `Buffer::from_c`.
        Buffer::<u8>::from_c(buf, true)
    };
    Ok(Vec::from(buf.as_ref()))
}
/// Returns a serialized KernelList protocol buffer containing KernelDefs for
/// all kernels registered for the operation named `name`.
///
/// Errors if `name` contains an interior NUL byte or the C call fails.
pub fn get_registered_kernels_for_op(name: &str) -> Result<Vec<u8>> {
    let c_name = CString::new(name)?;
    let mut status = Status::new();
    let buf = unsafe {
        let buf = tf::TF_GetRegisteredKernelsForOp(c_name.as_ptr(), status.inner());
        if !status.is_ok() {
            return Err(status);
        }
        Buffer::<u8>::from_c(buf, true)
    };
    Ok(Vec::from(buf.as_ref()))
}
////////////////////////
/// A Shape is the shape of a tensor. A Shape may be an unknown rank, or it may
/// have a known rank with each dimension being known or unknown.
///
/// `None` means unknown rank; `Some(v)` holds one entry per dimension, each
/// individually known (`Some`) or unknown (`None`).
#[derive(Debug, Eq, Ord, PartialEq, PartialOrd, Hash, Clone, Default)]
pub struct Shape(Option<Vec<Option<i64>>>);
impl Shape {
/// Creates a new Shape.
pub fn new(s: Option<Vec<Option<i64>>>) -> Shape {
Shape(s)
}
/// Returns the number of dimensions if known, or None if unknown.
pub fn dims(&self) -> Option<usize> {
match *self {
Shape(None) => None,
Shape(Some(ref v)) => Some(v.len()),
}
}
}
impl From<Option<Vec<Option<i64>>>> for Shape {
    /// Wraps an optional dimension list directly as a `Shape`.
    fn from(data: Option<Vec<Option<i64>>>) -> Shape {
        Shape(data)
    }
}
impl Into<Option<Vec<Option<i64>>>> for Shape {
fn into(self) -> Option<Vec<Option<i64>>> {
self.0
}
}
// Shared `None` value so `Index` can return a reference for unknown or
// out-of-range dimensions.
static UNKNOWN_DIMENSION: Option<i64> = None;
impl Index<usize> for Shape {
    type Output = Option<i64>;
    /// Returns the dimension at `index`, or `&None` when the rank is unknown
    /// or `index` is out of bounds (never panics).
    fn index(&self, index: usize) -> &Option<i64> {
        match self.0 {
            None => &UNKNOWN_DIMENSION,
            Some(ref v) => {
                if index < v.len() {
                    &v[index]
                } else {
                    &UNKNOWN_DIMENSION
                }
            }
        }
    }
}
impl Display for Shape {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // Delegates to the inner Option's `fmt` (its `Debug` rendering, since
        // `Option` has no `Display` impl).
        self.0.fmt(f)
    }
}
////////////////////////
mod while_loop;
pub use crate::while_loop::*;
////////////////////////
#[cfg(test)]
mod tests {
    use super::*;
    // Builds an empty graph plus a session attached to it, panicking (and
    // failing the test) if session creation does not succeed.
    fn create_session() -> (Session, Graph) {
        let graph = Graph::new();
        let options = SessionOptions::new();
        match Session::new(&options, &graph) {
            Ok(session) => (session, graph),
            Err(status) => panic!("Creating session failed with status: {}", status),
        }
    }
    #[test]
    fn smoke() {
        create_session();
    }
    #[test]
    fn test_close() {
        let (mut session, _) = create_session();
        let status = session.close();
        assert!(status.is_ok());
    }
    #[test]
    fn test_tensor() {
        let mut tensor = <Tensor<f32>>::new(&[2, 3]);
        assert_eq!(tensor.len(), 6);
        tensor[0] = 1.0;
    }
    #[test]
    fn test_tensor_native_type_zero() {
        let tensor = <Tensor<i32>>::new(&[1000]);
        // Checking against null-initialized slice/vector makes
        // the unit test succeed often on repeated runs.
        for v in tensor.as_ref() {
            assert_eq!(0, *v);
        }
    }
    #[test]
    fn test_set_target() {
        let mut options = SessionOptions::new();
        options.set_target("local").unwrap();
    }
    #[test]
    fn test_set_config() {
        let mut options = SessionOptions::new();
        // An empty array is a valid proto, since all fields are optional.
        options.set_config(&vec![]).unwrap();
    }
    #[test]
    fn test_run() {
        // Graph is just y = 2 * x
        // (serialized GraphDef proto: Placeholder x, Const y/y = 2.0, Mul y)
        let graph_proto = vec![
            0x0a, 0x2a, 0x0a, 0x01, 0x78, 0x12, 0x0b, 0x50, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f,
            0x6c, 0x64, 0x65, 0x72, 0x2a, 0x0b, 0x0a, 0x05, 0x64, 0x74, 0x79, 0x70, 0x65, 0x12,
            0x02, 0x30, 0x01, 0x2a, 0x0b, 0x0a, 0x05, 0x73, 0x68, 0x61, 0x70, 0x65, 0x12, 0x02,
            0x3a, 0x00, 0x0a, 0x30, 0x0a, 0x03, 0x79, 0x2f, 0x79, 0x12, 0x05, 0x43, 0x6f, 0x6e,
            0x73, 0x74, 0x2a, 0x0b, 0x0a, 0x05, 0x64, 0x74, 0x79, 0x70, 0x65, 0x12, 0x02, 0x30,
            0x01, 0x2a, 0x15, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x0c, 0x42, 0x0a,
            0x08, 0x01, 0x12, 0x00, 0x2a, 0x04, 0x00, 0x00, 0x00, 0x40, 0x0a, 0x19, 0x0a, 0x01,
            0x79, 0x12, 0x03, 0x4d, 0x75, 0x6c, 0x1a, 0x01, 0x78, 0x1a, 0x03, 0x79, 0x2f, 0x79,
            0x2a, 0x07, 0x0a, 0x01, 0x54, 0x12, 0x02, 0x30, 0x01,
        ];
        let (session, mut graph) = create_session();
        let opts = ImportGraphDefOptions::new();
        let status = graph.import_graph_def(&graph_proto, &opts);
        assert!(status.is_ok());
        let mut x = <Tensor<f32>>::new(&[2]);
        x[0] = 2.0;
        x[1] = 3.0;
        let mut step = SessionRunArgs::new();
        let x_op = graph.operation_by_name_required("x").unwrap();
        step.add_feed(&x_op, 0, &x);
        let y_op = graph.operation_by_name_required("y").unwrap();
        let output_ix = step.request_fetch(&y_op, 0);
        session.run(&mut step).unwrap();
        let output_tensor = step.fetch::<f32>(output_ix).unwrap();
        assert_eq!(output_tensor.len(), 2);
        assert_eq!(output_tensor[0], 4.0);
        assert_eq!(output_tensor[1], 6.0);
    }
    #[test]
    fn test_bfloat16() {
        // Values are strictly increasing, which the ordering checks rely on.
        let data = [-1.0f32, 0.0, 1.0, 2.5];
        for i in 0..data.len() {
            let x = data[i];
            let bfx = BFloat16::from(x);
            assert_eq!(<BFloat16 as Into<f32>>::into(bfx), x);
            assert_eq!(bfx.partial_cmp(&bfx), Some(Ordering::Equal));
            assert!(bfx.eq(&bfx));
            for j in 0..i {
                let y = data[j];
                let bfy = BFloat16::from(y);
                assert_eq!(bfx.partial_cmp(&bfy), Some(Ordering::Greater));
                assert_eq!(bfy.partial_cmp(&bfx), Some(Ordering::Less));
                assert!(!bfx.eq(&bfy));
            }
        }
        assert_eq!(<BFloat16 as Into<f32>>::into(BFloat16::default()), 0.0f32);
        assert_eq!(BFloat16::from(1.5f32).to_string(), "1.5");
    }
    #[test]
    fn test_f16() {
        let data: Vec<f16> = vec![-1.0f32, 0.0, 1.0, 2.5]
            .into_iter()
            .map(|x| f16::from_f32(x))
            .collect();
        let tensor = <Tensor<f16>>::new(&[2, 2]).with_values(&data).unwrap();
        assert_eq!(&tensor[..], &data[..]);
    }
    #[test]
    fn test_strings() {
        let mut g = Graph::new();
        let x_op = {
            let mut nd = g.new_operation("Placeholder", "x").unwrap();
            nd.set_attr_type("dtype", DataType::String).unwrap();
            nd.set_attr_shape("shape", &Shape(Some(vec![]))).unwrap();
            nd.finish().unwrap()
        };
        let y_op = {
            let mut nd = g.new_operation("EncodeBase64", "y").unwrap();
            nd.add_input(x_op.clone());
            nd.finish().unwrap()
        };
        let options = SessionOptions::new();
        let session = Session::new(&options, &g).unwrap();
        let mut x = <Tensor<String>>::new(&[2]);
        x[0] = "foo".to_string();
        x[1] = "bar".to_string();
        let mut step = SessionRunArgs::new();
        step.add_feed(&x_op, 0, &x);
        let output_ix = step.request_fetch(&y_op, 0);
        session.run(&mut step).unwrap();
        let output_tensor = step.fetch::<String>(output_ix).unwrap();
        assert_eq!(output_tensor.len(), 2);
        // Expected values are base64("foo") and base64("bar").
        assert_eq!(output_tensor[0], "Zm9v");
        assert_eq!(output_tensor[1], "YmFy");
    }
    #[test]
    fn tensor_clone() {
        let x = Tensor::<i32>::new(&[3]).with_values(&[1, 2, 3]).unwrap();
        let clone = x.clone();
        assert_eq!(x, clone);
    }
    #[test]
    fn tensor_eq() {
        let a = Tensor::<i32>::new(&[3]).with_values(&[1, 2, 3]).unwrap();
        let b = Tensor::<i32>::from(&[1, 2, 3][..]);
        let c = Tensor::<i32>::new(&[3]).with_values(&[1, 2, 4]).unwrap();
        let d = Tensor::<i32>::new(&[3, 1]).with_values(&[1, 2, 3]).unwrap();
        assert_eq!(a, b);
        assert_ne!(a, c);
        assert_ne!(a, d);
    }
    #[test]
    fn tensor_display() {
        // (expected rendering, shape, values)
        let tests = [
            ("1", &[][..], &[1][..]),
            ("[1]", &[1], &[1]),
            ("[1, 2]", &[2], &[1, 2]),
            ("[[1, 2], [3, 4]]", &[2, 2], &[1, 2, 3, 4]),
            ("[[[1], [2]], [[3], [4]]]", &[2, 2, 1], &[1, 2, 3, 4]),
            ("[[[1, 2]], [[3, 4]]]", &[2, 1, 2], &[1, 2, 3, 4]),
            ("[[[[], []]], [[[], []]]]", &[2, 1, 2, 0], &[]),
            ("[[], []]", &[2, 0], &[]),
            ("[[], []]", &[2, 0, 2], &[]),
            ("[]", &[0], &[]),
            ("[]", &[0, 0], &[]),
        ];
        for &(expected, shape, values) in tests.iter() {
            let tensor = Tensor::<i32>::new(shape).with_values(values).unwrap();
            assert_eq!(expected, format!("{}", tensor));
        }
    }
    #[test]
    fn test_get_all_registered_kernels() {
        assert!(get_all_registered_kernels().unwrap().len() > 0);
    }
    #[test]
    fn test_get_registered_kernels_for_op() {
        assert!(get_registered_kernels_for_op("Add").unwrap().len() > 0);
    }
}
|
#![feature(read_exact)]
#[macro_use]
extern crate quick_error;
extern crate byteorder;
use std::path::Path;
use std::fs::File;
use std::io::{self, BufReader, BufWriter};
use std::io::prelude::*;
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
// Errors that can occur while reading a length-prefixed, null-terminated
// string from the data file.
quick_error! {
    #[derive(Debug)]
    pub enum StringReadError {
        // Underlying reader failure.
        Io(err: io::Error) {
            from()
        }
        // Failure decoding the little-endian length prefix.
        ByteOrder(err: byteorder::Error) {
            from()
        }
        // The byte following the string body was not 0.
        MissingNullTerminator {}
    }
}
/// Decoded contents of a STRG (string table) chunk.
#[derive(Debug, Clone)]
pub struct StringTable {
    /// File offsets of the strings, as stored in the chunk.
    pub offsets: Vec<u32>,
    /// The string values themselves.
    pub strings: Vec<String>,
}
/// Payload of a chunk, one variant per 4-byte type id. Most variants keep the
/// raw bytes unparsed; only FORM (nested chunks) and STRG (string table) are
/// decoded further.
#[derive(Debug)]
pub enum ChunkContent {
    /// "FORM": container of nested chunks.
    Form(Vec<Chunk>),
    Gen8(Vec<u8>),
    Optn(Vec<u8>),
    Extn(Vec<u8>),
    /// "SOND"
    Sound(Vec<u8>),
    Agrp(Vec<u8>),
    /// "SPRT"
    Sprite(Vec<u8>),
    /// "BGND"
    Background(Vec<u8>),
    Path(Vec<u8>),
    /// "SCPT"
    Script(Vec<u8>),
    /// "SHDR"
    Shader(Vec<u8>),
    Font(Vec<u8>),
    /// "TMLN"
    Timeline(Vec<u8>),
    /// "OBJT"
    Object(Vec<u8>),
    Room(Vec<u8>),
    Dafl(Vec<u8>),
    Tpag(Vec<u8>),
    Code(Vec<u8>),
    Vari(Vec<u8>),
    /// "FUNC"
    Function(Vec<u8>),
    /// "STRG": decoded string table.
    StringTable(StringTable),
    Txtr(Vec<u8>),
    /// "AUDO"
    Audio(Vec<u8>),
}
/// A chunk as read from the file: its decoded content plus the payload size
/// recorded in the header.
#[derive(Debug)]
pub struct Chunk {
    pub content: ChunkContent,
    /// Payload size in bytes from the chunk header (header itself excluded).
    pub size: i32,
}
/// Length of a chunk's 4-byte type identifier.
pub const TYPE_ID_LEN: usize = 4;
/// Type id plus the 4-byte little-endian size field.
pub const CHUNK_HEADER_LEN: usize = TYPE_ID_LEN + 4;
// Errors that can occur while loading a data file.
quick_error! {
    #[derive(Debug)]
    pub enum LoadError {
        Io(err: io::Error) {
            from()
        }
        ByteOrder(err: byteorder::Error) {
            from()
        }
        // Error while reading a string of the STRG chunk.
        String(err: StringReadError) {
            from()
        }
    }
}
/// Reads exactly `len` bytes from `reader` into a newly allocated `Vec<u8>`.
fn read_into_byte_vec<R: Read>(reader: &mut R, len: usize) -> Result<Vec<u8>, io::Error> {
    // Zero-initialize instead of `with_capacity` + `set_len`: handing a
    // buffer of uninitialized memory to `read_exact` is undefined behavior
    // (and leaked uninitialized bytes on a short read).
    let mut vec = vec![0u8; len];
    try!(reader.read_exact(&mut vec));
    Ok(vec)
}
/// Reads one string record: a little-endian u32 length, `len` bytes of
/// UTF-8, then a mandatory 0 terminator byte.
fn read_string<R: Read>(reader: &mut R) -> Result<String, StringReadError> {
    let len = try!(reader.read_u32::<LittleEndian>());
    // Zero-initialize instead of `set_len` on uninitialized memory; exposing
    // uninitialized bytes to `read_exact` is undefined behavior.
    let mut buf = vec![0u8; len as usize];
    try!(reader.read_exact(&mut buf));
    let terminator = try!(reader.read_u8());
    if terminator == 0 {
        // We assume strings are valid UTF-8, if not, panic.
        Ok(String::from_utf8(buf).unwrap())
    } else {
        Err(StringReadError::MissingNullTerminator)
    }
}
/// Reads one chunk from `reader`: a 4-byte type id, a little-endian i32
/// payload size, then the payload (recursively for FORM, decoded for STRG,
/// raw bytes otherwise). Panics on an unrecognized type id.
fn read_chunk<R: Read>(reader: &mut R) -> Result<Chunk, LoadError> {
    let mut type_id = [0u8; TYPE_ID_LEN];
    try!(reader.read_exact(&mut type_id));
    let size = try!(reader.read_i32::<LittleEndian>());
    let content = match &type_id {
        b"FORM" => {
            // A FORM is a container: keep parsing nested chunks until its
            // payload (each child counted with its header) is exhausted.
            let mut chunks = Vec::new();
            let mut bytes_left = size;
            while bytes_left > 0 {
                let chunk = try!(read_chunk(reader));
                bytes_left -= chunk.size + CHUNK_HEADER_LEN as i32;
                chunks.push(chunk);
            }
            ChunkContent::Form(chunks)
        }
        b"GEN8" => ChunkContent::Gen8(try!(read_into_byte_vec(reader, size as usize))),
        b"OPTN" => ChunkContent::Optn(try!(read_into_byte_vec(reader, size as usize))),
        b"EXTN" => ChunkContent::Extn(try!(read_into_byte_vec(reader, size as usize))),
        b"SOND" => ChunkContent::Sound(try!(read_into_byte_vec(reader, size as usize))),
        b"AGRP" => ChunkContent::Agrp(try!(read_into_byte_vec(reader, size as usize))),
        b"SPRT" => ChunkContent::Sprite(try!(read_into_byte_vec(reader, size as usize))),
        b"BGND" => ChunkContent::Background(try!(read_into_byte_vec(reader, size as usize))),
        b"PATH" => ChunkContent::Path(try!(read_into_byte_vec(reader, size as usize))),
        b"SCPT" => ChunkContent::Script(try!(read_into_byte_vec(reader, size as usize))),
        b"SHDR" => ChunkContent::Shader(try!(read_into_byte_vec(reader, size as usize))),
        b"FONT" => ChunkContent::Font(try!(read_into_byte_vec(reader, size as usize))),
        b"TMLN" => ChunkContent::Timeline(try!(read_into_byte_vec(reader, size as usize))),
        b"OBJT" => ChunkContent::Object(try!(read_into_byte_vec(reader, size as usize))),
        b"ROOM" => ChunkContent::Room(try!(read_into_byte_vec(reader, size as usize))),
        b"DAFL" => ChunkContent::Dafl(try!(read_into_byte_vec(reader, size as usize))),
        b"TPAG" => ChunkContent::Tpag(try!(read_into_byte_vec(reader, size as usize))),
        b"CODE" => ChunkContent::Code(try!(read_into_byte_vec(reader, size as usize))),
        b"VARI" => ChunkContent::Vari(try!(read_into_byte_vec(reader, size as usize))),
        b"FUNC" => ChunkContent::Function(try!(read_into_byte_vec(reader, size as usize))),
        b"STRG" => {
            // String table: a u32 count, `count` offsets, then `count`
            // length-prefixed null-terminated strings.
            let count = try!(reader.read_u32::<LittleEndian>());
            let mut offsets = Vec::with_capacity(count as usize);
            for _ in 0..count {
                offsets.push(try!(reader.read_u32::<LittleEndian>()));
            }
            let mut strings = Vec::with_capacity(count as usize);
            for _ in 0..count {
                strings.push(try!(read_string(reader)));
            }
            // TODO: Why do we need to consume additional 4 bytes?
            // Looks like 4 zero bytes.
            let mut padding = [0u8; 4];
            try!(reader.read_exact(&mut padding));
            ChunkContent::StringTable(StringTable {
                offsets: offsets,
                strings: strings,
            })
        }
        b"TXTR" => ChunkContent::Txtr(try!(read_into_byte_vec(reader, size as usize))),
        b"AUDO" => ChunkContent::Audio(try!(read_into_byte_vec(reader, size as usize))),
        _ => panic!("Unknown type id \"{}\"", String::from_utf8_lossy(&type_id)),
    };
    Ok(Chunk {
        content: content,
        size: size,
    })
}
/// Serializes `chunk` (header plus payload) to `writer`.
///
/// Sizes are recomputed via `content_len`, so the `size` field recorded at
/// load time is not trusted when writing.
fn write_chunk<W: Write>(writer: &mut W, chunk: &Chunk) -> Result<(), io::Error> {
    // Writes one "raw" chunk: a 4-byte tag, a little-endian i32 length, and
    // the payload bytes verbatim. Factored out of the twenty identical
    // match arms below.
    fn write_raw<W: Write>(
        writer: &mut W,
        type_id: &[u8; 4],
        len: i32,
        payload: &[u8],
    ) -> Result<(), io::Error> {
        try!(writer.write_all(type_id));
        try!(writer.write_i32::<LittleEndian>(len));
        writer.write_all(payload)
    }
    match chunk.content {
        ChunkContent::Form(ref chunks) => {
            // FORM writes its header, then each nested chunk in order.
            try!(writer.write_all(b"FORM"));
            try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
            for chunk in chunks.iter() {
                try!(write_chunk(writer, chunk));
            }
            Ok(())
        }
        ChunkContent::Gen8(ref vec) => write_raw(writer, b"GEN8", chunk.content_len(), vec),
        ChunkContent::Optn(ref vec) => write_raw(writer, b"OPTN", chunk.content_len(), vec),
        ChunkContent::Extn(ref vec) => write_raw(writer, b"EXTN", chunk.content_len(), vec),
        ChunkContent::Sound(ref vec) => write_raw(writer, b"SOND", chunk.content_len(), vec),
        ChunkContent::Agrp(ref vec) => write_raw(writer, b"AGRP", chunk.content_len(), vec),
        ChunkContent::Sprite(ref vec) => write_raw(writer, b"SPRT", chunk.content_len(), vec),
        ChunkContent::Background(ref vec) => write_raw(writer, b"BGND", chunk.content_len(), vec),
        ChunkContent::Path(ref vec) => write_raw(writer, b"PATH", chunk.content_len(), vec),
        ChunkContent::Script(ref vec) => write_raw(writer, b"SCPT", chunk.content_len(), vec),
        ChunkContent::Shader(ref vec) => write_raw(writer, b"SHDR", chunk.content_len(), vec),
        ChunkContent::Font(ref vec) => write_raw(writer, b"FONT", chunk.content_len(), vec),
        ChunkContent::Timeline(ref vec) => write_raw(writer, b"TMLN", chunk.content_len(), vec),
        ChunkContent::Object(ref vec) => write_raw(writer, b"OBJT", chunk.content_len(), vec),
        ChunkContent::Room(ref vec) => write_raw(writer, b"ROOM", chunk.content_len(), vec),
        ChunkContent::Dafl(ref vec) => write_raw(writer, b"DAFL", chunk.content_len(), vec),
        ChunkContent::Tpag(ref vec) => write_raw(writer, b"TPAG", chunk.content_len(), vec),
        ChunkContent::Code(ref vec) => write_raw(writer, b"CODE", chunk.content_len(), vec),
        ChunkContent::Vari(ref vec) => write_raw(writer, b"VARI", chunk.content_len(), vec),
        ChunkContent::Function(ref vec) => write_raw(writer, b"FUNC", chunk.content_len(), vec),
        ChunkContent::StringTable(ref table) => {
            try!(writer.write_all(b"STRG"));
            try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
            try!(writer.write_u32::<LittleEndian>(table.strings.len() as u32));
            for offs in &table.offsets {
                try!(writer.write_u32::<LittleEndian>(*offs));
            }
            for string in &table.strings {
                // Length prefix, UTF-8 bytes, then the null terminator.
                try!(writer.write_u32::<LittleEndian>(string.len() as u32));
                try!(writer.write_all(string.as_bytes()));
                try!(writer.write_u8(0));
            }
            // Required padding
            try!(writer.write_all(&[0u8; 4]));
            Ok(())
        }
        ChunkContent::Txtr(ref vec) => write_raw(writer, b"TXTR", chunk.content_len(), vec),
        ChunkContent::Audio(ref vec) => write_raw(writer, b"AUDO", chunk.content_len(), vec),
    }
}
/// Opens `path` and parses its contents as a single top-level chunk.
pub fn load_from_file<P: AsRef<Path>>(path: P) -> Result<Chunk, LoadError> {
    let mut reader = BufReader::new(try!(File::open(path)));
    read_chunk(&mut reader)
}
impl Chunk {
    /// Serializes this chunk to `path`, creating or truncating the file.
    pub fn save_to_file<P: AsRef<Path>>(&self, path: P) -> Result<(), io::Error> {
        let file = try!(File::create(path));
        let mut writer = BufWriter::new(file);
        write_chunk(&mut writer, self)
    }
    /// Size in bytes of this chunk's payload (excluding its own 8-byte
    /// header), as `write_chunk` would produce it.
    pub fn content_len(&self) -> i32 {
        match self.content {
            ChunkContent::Form(ref chunks) => {
                // A FORM's payload is each child's payload plus that child's header.
                chunks.iter().fold(0,
                    |acc, chunk| acc + chunk.content_len() + CHUNK_HEADER_LEN as i32)
            }
            ChunkContent::Gen8(ref vec) => vec.len() as i32,
            ChunkContent::Optn(ref vec) => vec.len() as i32,
            ChunkContent::Extn(ref vec) => vec.len() as i32,
            ChunkContent::Sound(ref vec) => vec.len() as i32,
            ChunkContent::Agrp(ref vec) => vec.len() as i32,
            ChunkContent::Sprite(ref vec) => vec.len() as i32,
            ChunkContent::Background(ref vec) => vec.len() as i32,
            ChunkContent::Path(ref vec) => vec.len() as i32,
            ChunkContent::Script(ref vec) => vec.len() as i32,
            ChunkContent::Shader(ref vec) => vec.len() as i32,
            ChunkContent::Font(ref vec) => vec.len() as i32,
            ChunkContent::Timeline(ref vec) => vec.len() as i32,
            ChunkContent::Object(ref vec) => vec.len() as i32,
            ChunkContent::Room(ref vec) => vec.len() as i32,
            ChunkContent::Dafl(ref vec) => vec.len() as i32,
            ChunkContent::Tpag(ref vec) => vec.len() as i32,
            ChunkContent::Code(ref vec) => vec.len() as i32,
            ChunkContent::Vari(ref vec) => vec.len() as i32,
            ChunkContent::Function(ref vec) => vec.len() as i32,
            ChunkContent::StringTable(ref table) => {
                let mut lengths = 0;
                for s in &table.strings {
                    // + 1 for null terminator
                    lengths += s.len() + 1;
                }
                // Leading 4 = u32 string count; trailing +4 = zero padding.
                // NOTE(review): string records are 4-byte-length-prefixed,
                // and the writer emits strings.len() as the count but
                // offsets.len() offset entries — assumes the two collections
                // always have equal length; verify.
                (4 + (table.offsets.len() * 4) + lengths + 4) as i32
            }
            ChunkContent::Txtr(ref vec) => vec.len() as i32,
            ChunkContent::Audio(ref vec) => vec.len() as i32,
        }
    }
}
Also write the string table's trailing padding and the null terminator byte after each string.
#![feature(read_exact)]
#[macro_use]
extern crate quick_error;
extern crate byteorder;
use std::path::Path;
use std::fs::File;
use std::io::{self, BufReader, BufWriter};
use std::io::prelude::*;
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
// Errors that can occur while reading a length-prefixed, null-terminated
// string from the data file.
quick_error! {
    #[derive(Debug)]
    pub enum StringReadError {
        // Underlying reader failure.
        Io(err: io::Error) {
            from()
        }
        // Failure decoding the little-endian length prefix.
        ByteOrder(err: byteorder::Error) {
            from()
        }
        // The byte following the string body was not 0.
        MissingNullTerminator {}
    }
}
/// Decoded contents of a STRG (string table) chunk.
#[derive(Debug, Clone)]
pub struct StringTable {
    /// File offsets of the strings, as stored in the chunk.
    pub offsets: Vec<u32>,
    /// The string values themselves.
    pub strings: Vec<String>,
}
/// Payload of a chunk, one variant per 4-byte type id. Most variants keep the
/// raw bytes unparsed; only FORM (nested chunks) and STRG (string table) are
/// decoded further.
#[derive(Debug)]
pub enum ChunkContent {
    /// "FORM": container of nested chunks.
    Form(Vec<Chunk>),
    Gen8(Vec<u8>),
    Optn(Vec<u8>),
    Extn(Vec<u8>),
    /// "SOND"
    Sound(Vec<u8>),
    Agrp(Vec<u8>),
    /// "SPRT"
    Sprite(Vec<u8>),
    /// "BGND"
    Background(Vec<u8>),
    Path(Vec<u8>),
    /// "SCPT"
    Script(Vec<u8>),
    /// "SHDR"
    Shader(Vec<u8>),
    Font(Vec<u8>),
    /// "TMLN"
    Timeline(Vec<u8>),
    /// "OBJT"
    Object(Vec<u8>),
    Room(Vec<u8>),
    Dafl(Vec<u8>),
    Tpag(Vec<u8>),
    Code(Vec<u8>),
    Vari(Vec<u8>),
    /// "FUNC"
    Function(Vec<u8>),
    /// "STRG": decoded string table.
    StringTable(StringTable),
    Txtr(Vec<u8>),
    /// "AUDO"
    Audio(Vec<u8>),
}
/// A chunk as read from the file: its decoded content plus the payload size
/// recorded in the header.
#[derive(Debug)]
pub struct Chunk {
    pub content: ChunkContent,
    /// Payload size in bytes from the chunk header (header itself excluded).
    pub size: i32,
}
/// Length of a chunk's 4-byte type identifier.
pub const TYPE_ID_LEN: usize = 4;
/// Type id plus the 4-byte little-endian size field.
pub const CHUNK_HEADER_LEN: usize = TYPE_ID_LEN + 4;
// Errors that can occur while loading a data file.
quick_error! {
    #[derive(Debug)]
    pub enum LoadError {
        Io(err: io::Error) {
            from()
        }
        ByteOrder(err: byteorder::Error) {
            from()
        }
        // Error while reading a string of the STRG chunk.
        String(err: StringReadError) {
            from()
        }
    }
}
/// Reads exactly `len` bytes from `reader` into a newly allocated `Vec<u8>`.
fn read_into_byte_vec<R: Read>(reader: &mut R, len: usize) -> Result<Vec<u8>, io::Error> {
    // Zero-initialize instead of `with_capacity` + `set_len`: handing a
    // buffer of uninitialized memory to `read_exact` is undefined behavior
    // (and leaked uninitialized bytes on a short read).
    let mut vec = vec![0u8; len];
    try!(reader.read_exact(&mut vec));
    Ok(vec)
}
/// Reads one string record: a little-endian u32 length, `len` bytes of
/// UTF-8, then a mandatory 0 terminator byte.
fn read_string<R: Read>(reader: &mut R) -> Result<String, StringReadError> {
    let len = try!(reader.read_u32::<LittleEndian>());
    // Zero-initialize instead of `set_len` on uninitialized memory; exposing
    // uninitialized bytes to `read_exact` is undefined behavior.
    let mut buf = vec![0u8; len as usize];
    try!(reader.read_exact(&mut buf));
    let terminator = try!(reader.read_u8());
    if terminator == 0 {
        // We assume strings are valid UTF-8, if not, panic.
        Ok(String::from_utf8(buf).unwrap())
    } else {
        Err(StringReadError::MissingNullTerminator)
    }
}
/// Reads one chunk from `reader`: a 4-byte type id, a little-endian i32
/// payload size, then the payload (recursively for FORM, decoded for STRG,
/// raw bytes otherwise). Panics on an unrecognized type id.
fn read_chunk<R: Read>(reader: &mut R) -> Result<Chunk, LoadError> {
    let mut type_id = [0u8; TYPE_ID_LEN];
    try!(reader.read_exact(&mut type_id));
    let size = try!(reader.read_i32::<LittleEndian>());
    let content = match &type_id {
        b"FORM" => {
            // A FORM is a container: keep parsing nested chunks until its
            // payload (each child counted with its header) is exhausted.
            let mut chunks = Vec::new();
            let mut bytes_left = size;
            while bytes_left > 0 {
                let chunk = try!(read_chunk(reader));
                bytes_left -= chunk.size + CHUNK_HEADER_LEN as i32;
                chunks.push(chunk);
            }
            ChunkContent::Form(chunks)
        }
        b"GEN8" => ChunkContent::Gen8(try!(read_into_byte_vec(reader, size as usize))),
        b"OPTN" => ChunkContent::Optn(try!(read_into_byte_vec(reader, size as usize))),
        b"EXTN" => ChunkContent::Extn(try!(read_into_byte_vec(reader, size as usize))),
        b"SOND" => ChunkContent::Sound(try!(read_into_byte_vec(reader, size as usize))),
        b"AGRP" => ChunkContent::Agrp(try!(read_into_byte_vec(reader, size as usize))),
        b"SPRT" => ChunkContent::Sprite(try!(read_into_byte_vec(reader, size as usize))),
        b"BGND" => ChunkContent::Background(try!(read_into_byte_vec(reader, size as usize))),
        b"PATH" => ChunkContent::Path(try!(read_into_byte_vec(reader, size as usize))),
        b"SCPT" => ChunkContent::Script(try!(read_into_byte_vec(reader, size as usize))),
        b"SHDR" => ChunkContent::Shader(try!(read_into_byte_vec(reader, size as usize))),
        b"FONT" => ChunkContent::Font(try!(read_into_byte_vec(reader, size as usize))),
        b"TMLN" => ChunkContent::Timeline(try!(read_into_byte_vec(reader, size as usize))),
        b"OBJT" => ChunkContent::Object(try!(read_into_byte_vec(reader, size as usize))),
        b"ROOM" => ChunkContent::Room(try!(read_into_byte_vec(reader, size as usize))),
        b"DAFL" => ChunkContent::Dafl(try!(read_into_byte_vec(reader, size as usize))),
        b"TPAG" => ChunkContent::Tpag(try!(read_into_byte_vec(reader, size as usize))),
        b"CODE" => ChunkContent::Code(try!(read_into_byte_vec(reader, size as usize))),
        b"VARI" => ChunkContent::Vari(try!(read_into_byte_vec(reader, size as usize))),
        b"FUNC" => ChunkContent::Function(try!(read_into_byte_vec(reader, size as usize))),
        b"STRG" => {
            // String table: a u32 count, `count` offsets, then `count`
            // length-prefixed null-terminated strings.
            let count = try!(reader.read_u32::<LittleEndian>());
            let mut offsets = Vec::with_capacity(count as usize);
            for _ in 0..count {
                offsets.push(try!(reader.read_u32::<LittleEndian>()));
            }
            let mut strings = Vec::with_capacity(count as usize);
            for _ in 0..count {
                strings.push(try!(read_string(reader)));
            }
            // TODO: Why do we need to consume additional 4 bytes?
            // Looks like 4 zero bytes.
            let mut padding = [0u8; 4];
            try!(reader.read_exact(&mut padding));
            ChunkContent::StringTable(StringTable {
                offsets: offsets,
                strings: strings,
            })
        }
        b"TXTR" => ChunkContent::Txtr(try!(read_into_byte_vec(reader, size as usize))),
        b"AUDO" => ChunkContent::Audio(try!(read_into_byte_vec(reader, size as usize))),
        _ => panic!("Unknown type id \"{}\"", String::from_utf8_lossy(&type_id)),
    };
    Ok(Chunk {
        content: content,
        size: size,
    })
}
fn write_chunk<W: Write>(writer: &mut W, chunk: &Chunk) -> Result<(), io::Error> {
match chunk.content {
ChunkContent::Form(ref chunks) => {
try!(writer.write_all(b"FORM"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
for chunk in chunks.iter() {
try!(write_chunk(writer, chunk));
}
Ok(())
}
ChunkContent::Gen8(ref vec) => {
try!(writer.write_all(b"GEN8"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::Optn(ref vec) => {
try!(writer.write_all(b"OPTN"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::Extn(ref vec) => {
try!(writer.write_all(b"EXTN"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::Sound(ref vec) => {
try!(writer.write_all(b"SOND"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::Agrp(ref vec) => {
try!(writer.write_all(b"AGRP"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::Sprite(ref vec) => {
try!(writer.write_all(b"SPRT"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::Background(ref vec) => {
try!(writer.write_all(b"BGND"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::Path(ref vec) => {
try!(writer.write_all(b"PATH"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::Script(ref vec) => {
try!(writer.write_all(b"SCPT"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::Shader(ref vec) => {
try!(writer.write_all(b"SHDR"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::Font(ref vec) => {
try!(writer.write_all(b"FONT"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::Timeline(ref vec) => {
try!(writer.write_all(b"TMLN"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::Object(ref vec) => {
try!(writer.write_all(b"OBJT"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::Room(ref vec) => {
try!(writer.write_all(b"ROOM"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::Dafl(ref vec) => {
try!(writer.write_all(b"DAFL"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::Tpag(ref vec) => {
try!(writer.write_all(b"TPAG"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::Code(ref vec) => {
try!(writer.write_all(b"CODE"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::Vari(ref vec) => {
try!(writer.write_all(b"VARI"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::Function(ref vec) => {
try!(writer.write_all(b"FUNC"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::StringTable(ref table) => {
try!(writer.write_all(b"STRG"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_u32::<LittleEndian>(table.strings.len() as u32));
for offs in &table.offsets {
try!(writer.write_u32::<LittleEndian>(*offs));
}
for string in &table.strings {
try!(writer.write_u32::<LittleEndian>(string.len() as u32));
try!(writer.write_all(string.as_bytes()));
try!(writer.write_u8(0));
}
// Required padding
try!(writer.write_all(&[0u8; 4]));
Ok(())
}
ChunkContent::Txtr(ref vec) => {
try!(writer.write_all(b"TXTR"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
ChunkContent::Audio(ref vec) => {
try!(writer.write_all(b"AUDO"));
try!(writer.write_i32::<LittleEndian>(chunk.content_len()));
try!(writer.write_all(vec));
Ok(())
}
}
}
/// Opens the file at `path` and reads a chunk hierarchy from it.
///
/// Reading is buffered; errors from opening the file or parsing the
/// chunk are propagated as `LoadError`.
pub fn load_from_file<P: AsRef<Path>>(path: P) -> Result<Chunk, LoadError> {
    let mut reader = BufReader::new(try!(File::open(path)));
    read_chunk(&mut reader)
}
impl Chunk {
    /// Serializes this chunk (and any children) to the file at `path`,
    /// creating or truncating it. Writing is buffered.
    pub fn save_to_file<P: AsRef<Path>>(&self, path: P) -> Result<(), io::Error> {
        let mut writer = BufWriter::new(try!(File::create(path)));
        write_chunk(&mut writer, self)
    }
    /// Length in bytes of this chunk's content, excluding its own
    /// tag/length header (this is the value written into the header).
    pub fn content_len(&self) -> i32 {
        match self.content {
            ChunkContent::Form(ref chunks) => {
                // A FORM's content is the sum of each child's header
                // plus that child's own content.
                let mut total = 0;
                for child in chunks.iter() {
                    total += child.content_len() + CHUNK_HEADER_LEN as i32;
                }
                total
            }
            ChunkContent::StringTable(ref table) => {
                // 4 bytes for the string count, 4 per offset, and 4
                // trailing zero-padding bytes...
                let mut len = 4 + table.offsets.len() * 4 + 4;
                for s in &table.strings {
                    // ...plus, per string: 4-byte length prefix, the
                    // bytes themselves, and a NUL terminator.
                    len += 4 + s.len() + 1;
                }
                len as i32
            }
            // Every raw-blob variant is just its byte length.
            ChunkContent::Gen8(ref vec) |
            ChunkContent::Optn(ref vec) |
            ChunkContent::Extn(ref vec) |
            ChunkContent::Sound(ref vec) |
            ChunkContent::Agrp(ref vec) |
            ChunkContent::Sprite(ref vec) |
            ChunkContent::Background(ref vec) |
            ChunkContent::Path(ref vec) |
            ChunkContent::Script(ref vec) |
            ChunkContent::Shader(ref vec) |
            ChunkContent::Font(ref vec) |
            ChunkContent::Timeline(ref vec) |
            ChunkContent::Object(ref vec) |
            ChunkContent::Room(ref vec) |
            ChunkContent::Dafl(ref vec) |
            ChunkContent::Tpag(ref vec) |
            ChunkContent::Code(ref vec) |
            ChunkContent::Vari(ref vec) |
            ChunkContent::Function(ref vec) |
            ChunkContent::Txtr(ref vec) |
            ChunkContent::Audio(ref vec) => vec.len() as i32,
        }
    }
}
|
extern crate libc;
extern crate notify;
extern crate libloading;
extern crate tempdir;
use libloading::Library;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::sync::mpsc::{channel, Receiver, Sender};
use notify::{RecommendedWatcher, Watcher, Event};
use tempdir::TempDir;
use std::time::Duration;
use std::thread;
use std::fs;
use std::env;
use std::fmt::Write;
/// A loaded dynamic library together with the path(s) it came from.
pub struct Lib {
    /// Handle to the loaded shared library.
    pub lib: Library,
    /// Path the library was actually loaded from (the shadow copy when a
    /// shadow directory is in use).
    pub loaded_path: PathBuf,
    /// Path of the source file when a shadow copy was made; `None` when
    /// the library was loaded in place.
    pub original_path: Option<PathBuf>,
}
/// Tracks loaded libraries and reloads them when their files change.
pub struct DynamicReload {
    // One strong Rc per tracked library (callers hold their own clones).
    libs: Vec<Rc<Lib>>,
    // File watcher; `None` when the watcher could not be created
    // (reloading is then effectively disabled).
    watcher: Option<RecommendedWatcher>,
    // Temp dir that shadow copies are placed in; `None` = load in place.
    shadow_dir: Option<TempDir>,
    // Extra directories searched by add_library.
    search_paths: Vec<&'static str>,
    // Receiving end of the watcher's change-event channel.
    watch_recv: Receiver<Event>,
}
/// Search strategy used when locating a library.
// NOTE(review): `DynamicReload::new` currently ignores its `_search`
// argument, so this enum has no observable effect yet.
pub enum Search {
    /// Use the default search order.
    Default,
    /// Also search backwards from the executable's directory.
    Backwards,
}
/// Whether a library name should be decorated with the platform's
/// shared-library prefix/suffix (e.g. "foo" -> "libfoo.so").
#[derive(PartialEq)]
pub enum UsePlatformName {
    /// Use the given name as-is.
    No,
    /// Format the name with the platform conventions.
    Yes,
}
// Test
impl DynamicReload {
    ///
    /// Creates a new reload handler.
    ///
    /// `search_paths` holds extra directories consulted by
    /// [add_library](struct.DynamicReload.html#method.add_library);
    /// `shadow_dir`, when given, names a directory under which a temporary
    /// "shadow" copy of each library is made before loading (see
    /// `load_library`). The `_search` strategy is currently unused.
    ///
    pub fn new(search_paths: Option<Vec<&'static str>>,
               shadow_dir: Option<&'static str>,
               _search: Search)
               -> DynamicReload {
        // Channel over which the file watcher (if one could be created)
        // delivers change events; polled in update_with_callback.
        let (tx, rx) = channel();
        DynamicReload {
            libs: Vec::new(),
            watcher: Self::get_watcher(tx),
            shadow_dir: Self::get_temp_dir(shadow_dir),
            watch_recv: rx,
            search_paths: Self::get_search_paths(search_paths),
        }
    }
    ///
    /// Add a library to be loaded and to be reloaded once updated.
    /// If UsePlatformName is set to Yes the input name will be formatted
    /// according to the standard way libraries look on that platform,
    /// examples:
    ///
    /// ```ignore
    /// Windows: foobar -> foobar.dll
    /// Linux:   foobar -> libfoobar.so
    /// Mac:     foobar -> libfoobar.dylib
    /// ````
    ///
    /// If set to No the given input name will be used as is. This function
    /// will also search for the file in this priority order
    ///
    ///
    /// ```ignore
    /// 1. Current directory
    /// 2. In the search paths (relative to current directory)
    /// 3. Current directory of the executable
    /// 4. Search backwards from executable if Backwards has been set in [new](struct.DynamicReload.html#method.new)
    /// ```
    ///
    pub fn add_library(&mut self,
                       name: &str,
                       name_format: UsePlatformName)
                       -> Result<Rc<Lib>, String> {
        match Self::try_load_library(self, name, name_format) {
            Ok(lib) => {
                // Watch the original file (when known) so we are notified
                // of changes; watch errors are deliberately ignored.
                if let Some(w) = self.watcher.as_mut() {
                    if let Some(path) = lib.original_path.as_ref() {
                        let _ = w.watch(path);
                    }
                }
                // Bump the ref here as we keep one around to keep track of files that needs to be reloaded
                self.libs.push(lib.clone());
                Ok(lib)
            }
            Err(e) => Err(e),
        }
    }
    ///
    /// Checks whether a dynamic library needs to be reloaded and, if so,
    /// calls the supplied callback functions.
    ///
    /// First `before_update` is called. This allows the calling application
    /// to perform actions before the dynamic library is unloaded, for
    /// example saving internal state that needs to be restored after the
    /// reload.
    ///
    /// After the reload is complete `after_update` is called, giving the
    /// host application the possibility to restore any saved state.
    ///
    /// If no callbacks are needed use the regular [update](struct.DynamicReload.html#method.update) call instead
    ///
    pub fn update_with_callback<F, T>(&mut self,
                                      ref before_update: F,
                                      ref after_update: F,
                                      data: &mut T)
        where F: Fn(&mut T, &Rc<Lib>)
    {
        // Non-blocking poll: do nothing unless the watcher reported a
        // changed file.
        match self.watch_recv.try_recv() {
            Ok(file) => {
                Self::reload_libs(self,
                                  file.path.as_ref().unwrap(),
                                  before_update,
                                  after_update,
                                  data)
            }
            _ => (),
        }
    }
    ///
    /// Updates the DynamicReload handler and reloads the dynamic libraries
    /// if needed
    ///
    // NOTE(review): currently a no-op — reloading only happens through
    // update_with_callback.
    pub fn update(&self) {}
    // Reloads every tracked library that matches `file_path`. Iterates in
    // reverse because reload_lib removes entries via swap_remove.
    fn reload_libs<F, T>(&mut self,
                         file_path: &PathBuf,
                         ref before_update: F,
                         ref after_update: F,
                         data: &mut T)
        where F: Fn(&mut T, &Rc<Lib>)
    {
        let len = self.libs.len();
        for i in (0..len).rev() {
            if Self::should_reload(file_path, &self.libs[i]) {
                Self::reload_lib(self, i, file_path, before_update, after_update, data);
            }
        }
    }
    // Replaces the library at `index` with a freshly loaded copy of
    // `file_path`: `before_update` runs while the old library is still
    // alive, `after_update` runs once the new one is in place. On load
    // failure the old entry stays removed and the error is only printed.
    fn reload_lib<F, T>(&mut self,
                        index: usize,
                        file_path: &PathBuf,
                        ref before_update: F,
                        ref after_update: F,
                        data: &mut T)
        where F: Fn(&mut T, &Rc<Lib>)
    {
        before_update(data, &self.libs[index]);
        self.libs.swap_remove(index);
        match Self::load_library(self, file_path) {
            Ok(lib) => {
                self.libs.push(lib.clone());
                after_update(data, &lib);
            }
            // What should we really do here?
            Err(err) => {
                println!("Unable to reload lib {:?} err {:?}", file_path, err);
            }
        }
    }
    // Resolves `name` to a path (see search_dirs) and loads it, or returns
    // a human-readable "Unable to find ..." error.
    fn try_load_library(&self,
                        name: &str,
                        name_format: UsePlatformName)
                        -> Result<Rc<Lib>, String> {
        if let Some(path) = Self::search_dirs(self, name, name_format) {
            Self::load_library(self, &path)
        } else {
            let mut t = "Unable to find ".to_string();
            t.push_str(name);
            Err(t)
        }
    }
    // Loads the library at `full_path`. When a shadow dir is configured,
    // the file is first copied there and the copy is what gets loaded,
    // with original_path remembering the source file.
    fn load_library(&self, full_path: &PathBuf) -> Result<Rc<Lib>, String> {
        let path;
        let original_path;
        if let Some(sd) = self.shadow_dir.as_ref() {
            path = sd.path().join(full_path.file_name().unwrap());
            if !Self::try_copy(&full_path, &path) {
                let mut error = "".to_string();
                write!(error, "Unable to copy {:?} to {:?}", full_path, path).unwrap();
                return Err(error);
            }
            original_path = Some(full_path.clone());
        } else {
            original_path = None;
            path = full_path.clone();
        }
        Self::init_library(original_path, path)
    }
    // Wraps libloading's Library::new, mapping failure to an error string.
    fn init_library(org_path: Option<PathBuf>, path: PathBuf) -> Result<Rc<Lib>, String> {
        match Library::new(&path) {
            Ok(l) => {
                Ok(Rc::new(Lib {
                    original_path: org_path,
                    loaded_path: path,
                    lib: l,
                }))
            }
            Err(e) => {
                let mut error = "".to_string();
                write!(error, "Unable to load library {:?}", e).unwrap();
                Err(error)
            }
        }
    }
    // True when the changed path refers to this lib's original file.
    // NOTE(review): this is substring containment, not path equality —
    // confirm that is the intended matching rule.
    fn should_reload(reload_path: &PathBuf, lib: &Lib) -> bool {
        if let Some(p) = lib.original_path.as_ref() {
            if reload_path.to_str().unwrap().contains(p.to_str().unwrap()) {
                return true;
            }
        }
        false
    }
    // Resolves a library name to a concrete path using the documented
    // search order (see add_library).
    fn search_dirs(&self, name: &str, name_format: UsePlatformName) -> Option<PathBuf> {
        let lib_name = Self::get_library_name(name, name_format);
        // 1. Search the current directory
        if let Some(path) = Self::search_current_dir(&lib_name) {
            return Some(path);
        }
        // 2. search the relative paths
        if let Some(path) = Self::search_relative_paths(self, &lib_name) {
            return Some(path);
        }
        // 3. searches in the executable dir and then backwards
        Self::search_backwards_from_exe(&lib_name)
    }
    // Checks for `name` in the process's current directory.
    fn search_current_dir(name: &String) -> Option<PathBuf> {
        Self::is_file(&Path::new(name).to_path_buf())
    }
    // Checks for `name` under each configured search path, in order.
    fn search_relative_paths(&self, name: &String) -> Option<PathBuf> {
        for p in self.search_paths.iter() {
            let path = Path::new(p).join(name);
            if let Some(file) = Self::is_file(&path) {
                return Some(file);
            }
        }
        None
    }
    // Owned parent directory of `path`, or None at the filesystem root.
    fn get_parent_dir(path: &PathBuf) -> Option<PathBuf> {
        match path.parent() {
            Some(p) => Some(p.to_path_buf()),
            _ => None,
        }
    }
    // Walks up the directory tree from `path`, looking for `lib_name` in
    // each ancestor directory (recursive; stops at the root).
    fn search_backwards_from_file(path: &PathBuf, lib_name: &String) -> Option<PathBuf> {
        match Self::get_parent_dir(path) {
            Some(p) => {
                let new_path = Path::new(&p).join(lib_name);
                if Self::is_file(&new_path).is_some() {
                    return Some(new_path);
                }
                Self::search_backwards_from_file(&p, lib_name)
            }
            _ => None,
        }
    }
    // Starts the backwards search from the running executable's location.
    fn search_backwards_from_exe(lib_name: &String) -> Option<PathBuf> {
        let exe_path = env::current_exe().unwrap_or(PathBuf::new());
        Self::search_backwards_from_file(&exe_path, lib_name)
    }
    // Creates the shadow-copy temp dir under `shadow_dir`; failure is
    // reported on stdout and reload falls back to loading in place.
    fn get_temp_dir(shadow_dir: Option<&str>) -> Option<TempDir> {
        match shadow_dir {
            Some(dir) => {
                match TempDir::new_in(dir, "shadow_libs") {
                    Ok(td) => Some(td),
                    Err(er) => {
                        println!("Unable to create tempdir {}", er);
                        None
                    }
                }
            }
            _ => None,
        }
    }
    // Returns `path` back if it names an existing regular file.
    fn is_file(path: &PathBuf) -> Option<PathBuf> {
        match fs::metadata(path) {
            Ok(md) => {
                if md.is_file() {
                    Some(path.clone())
                } else {
                    None
                }
            }
            _ => None,
        }
    }
    // In some cases when a file has been set that it's reloaded it's actually not possible
    // to read from it directly so this code does some testing first to ensure we
    // can actually read from it (by using metadata which does a stat on the file)
    // If we can't read from it we wait for 100 ms before we try again, if we can't
    // do it within 1 sec we give up
    //
    fn try_copy(src: &Path, dest: &Path) -> bool {
        for _ in 0..10 {
            match fs::metadata(src) {
                Ok(file) => {
                    let len = file.len();
                    if len > 0 {
                        fs::copy(&src, &dest).unwrap();
                        // println!("Copy from {} {}", src.to_str().unwrap(), dest.to_str().unwrap());
                        return true;
                    }
                }
                _ => (),
            }
            thread::sleep(Duration::from_millis(100));
        }
        false
    }
    // Creates the file watcher feeding `tx`; on failure prints a warning
    // and returns None (disabling reload notifications).
    fn get_watcher(tx: Sender<Event>) -> Option<RecommendedWatcher> {
        match Watcher::new(tx) {
            Ok(watcher) => Some(watcher),
            Err(e) => {
                println!("Unable to create file watcher, no dynamic reloading will be done, \
                          error: {:?}",
                         e);
                None
            }
        }
    }
    // Normalizes the optional search-path list to a (possibly empty) Vec.
    pub fn get_search_paths(search_paths: Option<Vec<&'static str>>) -> Vec<&'static str> {
        match search_paths {
            Some(paths) => paths.clone(),
            None => Vec::new(),
        }
    }
    // Applies platform name decoration when requested.
    fn get_library_name(name: &str, name_format: UsePlatformName) -> String {
        if name_format == UsePlatformName::Yes {
            Self::get_dynamiclib_name(name)
        } else {
            name.to_string()
        }
    }
    /// Formats dll name on Windows ("test_foo" -> "test_foo.dll")
    #[cfg(target_os="windows")]
    fn get_dynamiclib_name(name: &str) -> String {
        format!("{}.dll", name)
    }
    /// Formats dll name on Mac ("test_foo" -> "libtest_foo.dylib")
    #[cfg(target_os="macos")]
    fn get_dynamiclib_name(name: &str) -> String {
        format!("lib{}.dylib", name)
    }
    /// Formats dll name on *nix ("test_foo" -> "libtest_foo.so")
    #[cfg(any(target_os="linux",
              target_os="freebsd",
              target_os="dragonfly",
              target_os="netbsd",
              target_os="openbsd"))]
    fn get_dynamiclib_name(name: &str) -> String {
        format!("lib{}.so", name)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::process::Command;
    use std::sync::mpsc::channel;
    use std::path::Path;
    use std::env;
    // Compiles src/test_shared.rs into a dylib next to the test
    // executable, so add_library has something real to load.
    fn compile_test_shared_lib() {
        let exe_path = env::current_exe().unwrap();
        let lib_path = exe_path.parent().unwrap();
        let lib_name = "test_shared";
        let lib_full_path = Path::new(&lib_path).join(DynamicReload::get_dynamiclib_name(lib_name));
        // Only run if file doesn't exist
        if DynamicReload::is_file(&lib_full_path).is_none() {
            Command::new("rustc")
                .arg("src/test_shared.rs")
                .arg("--crate-name")
                .arg(&lib_name)
                .arg("--crate-type")
                .arg("dylib")
                .arg("--out-dir")
                .arg(&lib_path)
                .output()
                .unwrap_or_else(|e| panic!("failed to execute process: {}", e));
        }
    }
    // No search paths configured -> empty list.
    #[test]
    fn test_search_paths_none() {
        assert_eq!(DynamicReload::get_search_paths(None).len(), 0);
    }
    // Provided paths are passed through unchanged (duplicates kept).
    #[test]
    fn test_search_paths_some() {
        assert_eq!(DynamicReload::get_search_paths(Some(vec!["test", "test"])).len(),
                   2);
    }
    #[test]
    fn test_get_watcher() {
        let (tx, _) = channel();
        // We expect this to always work
        assert!(DynamicReload::get_watcher(tx).is_some());
    }
    // A nonexistent parent directory means no temp dir can be created.
    #[test]
    fn test_get_temp_dir_fail() {
        assert!(DynamicReload::get_temp_dir(Some("_no_such_dir")).is_none());
    }
    #[test]
    fn test_get_temp_dir_none() {
        assert!(DynamicReload::get_temp_dir(None).is_none());
    }
    // Empty string = current directory, which should be writable.
    #[test]
    fn test_get_temp_dir_ok() {
        assert!(DynamicReload::get_temp_dir(Some("")).is_some());
    }
    #[test]
    fn test_is_file_fail() {
        assert!(DynamicReload::is_file(&Path::new("haz_no_file_with_this_name").to_path_buf())
                    .is_none());
    }
    // The running test executable is guaranteed to exist.
    #[test]
    fn test_is_file_ok() {
        assert!(DynamicReload::is_file(&env::current_exe().unwrap()).is_some());
    }
    #[test]
    #[cfg(target_os="macos")]
    fn test_get_library_name_mac() {
        assert_eq!(DynamicReload::get_library_name("foobar", UsePlatformName::Yes),
                   "libfoobar.dylib");
    }
    // UsePlatformName::No leaves the name untouched.
    #[test]
    fn test_get_library_name() {
        assert_eq!(DynamicReload::get_library_name("foobar", UsePlatformName::No),
                   "foobar");
    }
    #[test]
    fn test_search_backwards_from_file_ok() {
        // While this rely on that we have a Cargo project it should be fine
        assert!(DynamicReload::search_backwards_from_exe(&"Cargo.toml".to_string()).is_some());
    }
    #[test]
    fn test_search_backwards_from_file_fail() {
        assert!(DynamicReload::search_backwards_from_exe(&"_no_such_file".to_string()).is_none());
    }
    #[test]
    fn test_add_library_fail() {
        let mut dr = DynamicReload::new(None, None, Search::Default);
        assert!(dr.add_library("wont_find_this_lib", UsePlatformName::No).is_err());
    }
    // Load the freshly compiled dylib without a shadow dir.
    #[test]
    fn test_add_shared_lib_ok() {
        compile_test_shared_lib();
        let mut dr = DynamicReload::new(None, None, Search::Default);
        assert!(dr.add_library("test_shared", UsePlatformName::Yes).is_ok());
    }
    // Same, but resolution goes through configured search paths.
    #[test]
    fn test_add_shared_lib_search_paths() {
        compile_test_shared_lib();
        let mut dr = DynamicReload::new(Some(vec!["../..", "../test"]), None, Search::Default);
        assert!(dr.add_library("test_shared", UsePlatformName::Yes).is_ok());
    }
    // Same, but loading goes through a shadow copy in target/debug.
    #[test]
    fn test_add_shared_shadow_dir_ok() {
        compile_test_shared_lib();
        let mut dr = DynamicReload::new(None, Some("target/debug"), Search::Default);
        assert!(dr.add_library("test_shared", UsePlatformName::Yes).is_ok());
    }
}
// Simplified code + testing watch and more.
extern crate libc;
extern crate notify;
extern crate libloading;
extern crate tempdir;
use libloading::Library;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::sync::mpsc::{channel, Receiver, Sender};
use notify::{RecommendedWatcher, Watcher, Event};
use tempdir::TempDir;
use std::time::Duration;
use std::thread;
use std::fs;
use std::env;
use std::fmt::Write;
/// A loaded dynamic library together with the path(s) it came from.
pub struct Lib {
    /// Handle to the loaded shared library.
    pub lib: Library,
    /// Path the library was actually loaded from (the shadow copy when a
    /// shadow directory is in use).
    pub loaded_path: PathBuf,
    /// Path of the source file when a shadow copy was made; `None` when
    /// the library was loaded in place.
    pub original_path: Option<PathBuf>,
}
/// Tracks loaded libraries and reloads them when their files change.
pub struct DynamicReload {
    // One strong Rc per tracked library (callers hold their own clones).
    libs: Vec<Rc<Lib>>,
    // File watcher; `None` when the watcher could not be created
    // (reloading is then effectively disabled).
    watcher: Option<RecommendedWatcher>,
    // Temp dir that shadow copies are placed in; `None` = load in place.
    shadow_dir: Option<TempDir>,
    // Extra directories searched by add_library.
    search_paths: Vec<&'static str>,
    // Receiving end of the watcher's change-event channel.
    watch_recv: Receiver<Event>,
}
/// Search strategy used when locating a library.
// NOTE(review): `DynamicReload::new` currently ignores its `_search`
// argument, so this enum has no observable effect yet.
pub enum Search {
    /// Use the default search order.
    Default,
    /// Also search backwards from the executable's directory.
    Backwards,
}
/// Whether a library name should be decorated with the platform's
/// shared-library prefix/suffix (e.g. "foo" -> "libfoo.so").
#[derive(PartialEq)]
pub enum UsePlatformName {
    /// Use the given name as-is.
    No,
    /// Format the name with the platform conventions.
    Yes,
}
// Test
impl DynamicReload {
    ///
    /// Creates a new reload handler.
    ///
    /// `search_paths` holds extra directories consulted by
    /// [add_library](struct.DynamicReload.html#method.add_library);
    /// `shadow_dir`, when given, names a directory under which a temporary
    /// "shadow" copy of each library is made before loading (see
    /// `load_library`). The `_search` strategy is currently unused.
    ///
    pub fn new(search_paths: Option<Vec<&'static str>>,
               shadow_dir: Option<&'static str>,
               _search: Search)
               -> DynamicReload {
        // Channel over which the file watcher (if one could be created)
        // delivers change events; polled in update_with_callback.
        let (tx, rx) = channel();
        DynamicReload {
            libs: Vec::new(),
            watcher: Self::get_watcher(tx),
            shadow_dir: Self::get_temp_dir(shadow_dir),
            watch_recv: rx,
            search_paths: Self::get_search_paths(search_paths),
        }
    }
    ///
    /// Add a library to be loaded and to be reloaded once updated.
    /// If UsePlatformName is set to Yes the input name will be formatted
    /// according to the standard way libraries look on that platform,
    /// examples:
    ///
    /// ```ignore
    /// Windows: foobar -> foobar.dll
    /// Linux:   foobar -> libfoobar.so
    /// Mac:     foobar -> libfoobar.dylib
    /// ````
    ///
    /// If set to No the given input name will be used as is. This function
    /// will also search for the file in this priority order
    ///
    ///
    /// ```ignore
    /// 1. Current directory
    /// 2. In the search paths (relative to current directory)
    /// 3. Current directory of the executable
    /// 4. Search backwards from executable if Backwards has been set in [new](struct.DynamicReload.html#method.new)
    /// ```
    ///
    pub fn add_library(&mut self,
                       name: &str,
                       name_format: UsePlatformName)
                       -> Result<Rc<Lib>, String> {
        match Self::try_load_library(self, name, name_format) {
            Ok(lib) => {
                // Watch the original file (when known) so we are notified
                // of changes; watch errors are deliberately ignored.
                if let Some(w) = self.watcher.as_mut() {
                    if let Some(path) = lib.original_path.as_ref() {
                        let _ = w.watch(path);
                    }
                }
                // Bump the ref here as we keep one around to keep track of files that needs to be reloaded
                self.libs.push(lib.clone());
                Ok(lib)
            }
            Err(e) => Err(e),
        }
    }
    ///
    /// Checks whether a dynamic library needs to be reloaded and, if so,
    /// calls the supplied callback.
    ///
    /// `update_call` is invoked twice per reload: once with `true` before
    /// the old library is unloaded (allowing the application to save any
    /// internal state that needs to be restored after the reload), and
    /// once with `false` after the new library is in place (allowing the
    /// application to restore that state).
    ///
    /// If no callbacks are needed use the regular [update](struct.DynamicReload.html#method.update) call instead
    ///
    pub fn update_with_callback<F, T>(&mut self,
                                      ref update_call: F,
                                      data: &mut T)
        where F: Fn(&mut T, bool, &Rc<Lib>)
    {
        // Non-blocking poll: do nothing unless the watcher reported a
        // changed file.
        match self.watch_recv.try_recv() {
            Ok(file) => {
                Self::reload_libs(self,
                                  file.path.as_ref().unwrap(),
                                  update_call,
                                  data)
            }
            _ => (),
        }
    }
    ///
    /// Updates the DynamicReload handler and reloads the dynamic libraries
    /// if needed
    ///
    // NOTE(review): currently a no-op — reloading only happens through
    // update_with_callback.
    pub fn update(&self) {}
    // Reloads every tracked library that matches `file_path`. Iterates in
    // reverse because reload_lib removes entries via swap_remove.
    fn reload_libs<F, T>(&mut self,
                         file_path: &PathBuf,
                         ref update_call: F,
                         data: &mut T)
        where F: Fn(&mut T, bool, &Rc<Lib>)
    {
        let len = self.libs.len();
        for i in (0..len).rev() {
            if Self::should_reload(file_path, &self.libs[i]) {
                Self::reload_lib(self, i, file_path, update_call, data);
            }
        }
    }
    // Replaces the library at `index` with a freshly loaded copy of
    // `file_path`: `update_call(data, true, ..)` runs while the old
    // library is still alive, `update_call(data, false, ..)` once the new
    // one is in place. On load failure the old entry stays removed and
    // the error is only printed.
    fn reload_lib<F, T>(&mut self,
                        index: usize,
                        file_path: &PathBuf,
                        ref update_call: F,
                        data: &mut T)
        where F: Fn(&mut T, bool, &Rc<Lib>)
    {
        update_call(data, true, &self.libs[index]);
        self.libs.swap_remove(index);
        match Self::load_library(self, file_path) {
            Ok(lib) => {
                self.libs.push(lib.clone());
                update_call(data, false, &lib);
            }
            // What should we really do here?
            Err(err) => {
                println!("Unable to reload lib {:?} err {:?}", file_path, err);
            }
        }
    }
    // Resolves `name` to a path (see search_dirs) and loads it, or returns
    // a human-readable "Unable to find ..." error.
    fn try_load_library(&self,
                        name: &str,
                        name_format: UsePlatformName)
                        -> Result<Rc<Lib>, String> {
        if let Some(path) = Self::search_dirs(self, name, name_format) {
            Self::load_library(self, &path)
        } else {
            let mut t = "Unable to find ".to_string();
            t.push_str(name);
            Err(t)
        }
    }
    // Loads the library at `full_path`. When a shadow dir is configured,
    // the file is first copied there and the copy is what gets loaded,
    // with original_path remembering the source file.
    fn load_library(&self, full_path: &PathBuf) -> Result<Rc<Lib>, String> {
        let path;
        let original_path;
        if let Some(sd) = self.shadow_dir.as_ref() {
            path = sd.path().join(full_path.file_name().unwrap());
            if !Self::try_copy(&full_path, &path) {
                let mut error = "".to_string();
                write!(error, "Unable to copy {:?} to {:?}", full_path, path).unwrap();
                return Err(error);
            }
            original_path = Some(full_path.clone());
        } else {
            original_path = None;
            path = full_path.clone();
        }
        Self::init_library(original_path, path)
    }
    // Wraps libloading's Library::new, mapping failure to an error string.
    fn init_library(org_path: Option<PathBuf>, path: PathBuf) -> Result<Rc<Lib>, String> {
        match Library::new(&path) {
            Ok(l) => {
                Ok(Rc::new(Lib {
                    original_path: org_path,
                    loaded_path: path,
                    lib: l,
                }))
            }
            Err(e) => {
                let mut error = "".to_string();
                write!(error, "Unable to load library {:?}", e).unwrap();
                Err(error)
            }
        }
    }
    // True when the changed path refers to this lib's original file.
    // NOTE(review): this is substring containment, not path equality —
    // confirm that is the intended matching rule.
    fn should_reload(reload_path: &PathBuf, lib: &Lib) -> bool {
        if let Some(p) = lib.original_path.as_ref() {
            if reload_path.to_str().unwrap().contains(p.to_str().unwrap()) {
                return true;
            }
        }
        false
    }
    // Resolves a library name to a concrete path using the documented
    // search order (see add_library).
    fn search_dirs(&self, name: &str, name_format: UsePlatformName) -> Option<PathBuf> {
        let lib_name = Self::get_library_name(name, name_format);
        // 1. Search the current directory
        if let Some(path) = Self::search_current_dir(&lib_name) {
            return Some(path);
        }
        // 2. search the relative paths
        if let Some(path) = Self::search_relative_paths(self, &lib_name) {
            return Some(path);
        }
        // 3. searches in the executable dir and then backwards
        Self::search_backwards_from_exe(&lib_name)
    }
    // Checks for `name` in the process's current directory.
    fn search_current_dir(name: &String) -> Option<PathBuf> {
        Self::is_file(&Path::new(name).to_path_buf())
    }
    // Checks for `name` under each configured search path, in order.
    fn search_relative_paths(&self, name: &String) -> Option<PathBuf> {
        for p in self.search_paths.iter() {
            let path = Path::new(p).join(name);
            if let Some(file) = Self::is_file(&path) {
                return Some(file);
            }
        }
        None
    }
    // Owned parent directory of `path`, or None at the filesystem root.
    fn get_parent_dir(path: &PathBuf) -> Option<PathBuf> {
        match path.parent() {
            Some(p) => Some(p.to_path_buf()),
            _ => None,
        }
    }
    // Walks up the directory tree from `path`, looking for `lib_name` in
    // each ancestor directory (recursive; stops at the root).
    fn search_backwards_from_file(path: &PathBuf, lib_name: &String) -> Option<PathBuf> {
        match Self::get_parent_dir(path) {
            Some(p) => {
                let new_path = Path::new(&p).join(lib_name);
                if Self::is_file(&new_path).is_some() {
                    return Some(new_path);
                }
                Self::search_backwards_from_file(&p, lib_name)
            }
            _ => None,
        }
    }
    // Starts the backwards search from the running executable's location.
    fn search_backwards_from_exe(lib_name: &String) -> Option<PathBuf> {
        let exe_path = env::current_exe().unwrap_or(PathBuf::new());
        Self::search_backwards_from_file(&exe_path, lib_name)
    }
    // Creates the shadow-copy temp dir under `shadow_dir`; failure is
    // reported on stdout and reload falls back to loading in place.
    fn get_temp_dir(shadow_dir: Option<&str>) -> Option<TempDir> {
        match shadow_dir {
            Some(dir) => {
                match TempDir::new_in(dir, "shadow_libs") {
                    Ok(td) => Some(td),
                    Err(er) => {
                        println!("Unable to create tempdir {}", er);
                        None
                    }
                }
            }
            _ => None,
        }
    }
    // Returns `path` back if it names an existing regular file.
    fn is_file(path: &PathBuf) -> Option<PathBuf> {
        match fs::metadata(path) {
            Ok(md) => {
                if md.is_file() {
                    Some(path.clone())
                } else {
                    None
                }
            }
            _ => None,
        }
    }
    // In some cases when a file has been set that it's reloaded it's actually not possible
    // to read from it directly so this code does some testing first to ensure we
    // can actually read from it (by using metadata which does a stat on the file)
    // If we can't read from it we wait for 100 ms before we try again, if we can't
    // do it within 1 sec we give up
    //
    fn try_copy(src: &Path, dest: &Path) -> bool {
        for _ in 0..10 {
            match fs::metadata(src) {
                Ok(file) => {
                    let len = file.len();
                    if len > 0 {
                        fs::copy(&src, &dest).unwrap();
                        // println!("Copy from {} {}", src.to_str().unwrap(), dest.to_str().unwrap());
                        return true;
                    }
                }
                _ => (),
            }
            thread::sleep(Duration::from_millis(100));
        }
        false
    }
    // Creates the file watcher feeding `tx`; on failure prints a warning
    // and returns None (disabling reload notifications).
    fn get_watcher(tx: Sender<Event>) -> Option<RecommendedWatcher> {
        match Watcher::new(tx) {
            Ok(watcher) => Some(watcher),
            Err(e) => {
                println!("Unable to create file watcher, no dynamic reloading will be done, \
                          error: {:?}",
                         e);
                None
            }
        }
    }
    // Normalizes the optional search-path list to a (possibly empty) Vec.
    pub fn get_search_paths(search_paths: Option<Vec<&'static str>>) -> Vec<&'static str> {
        match search_paths {
            Some(paths) => paths.clone(),
            None => Vec::new(),
        }
    }
    // Applies platform name decoration when requested.
    fn get_library_name(name: &str, name_format: UsePlatformName) -> String {
        if name_format == UsePlatformName::Yes {
            Self::get_dynamiclib_name(name)
        } else {
            name.to_string()
        }
    }
    /// Formats dll name on Windows ("test_foo" -> "test_foo.dll")
    #[cfg(target_os="windows")]
    fn get_dynamiclib_name(name: &str) -> String {
        format!("{}.dll", name)
    }
    /// Formats dll name on Mac ("test_foo" -> "libtest_foo.dylib")
    #[cfg(target_os="macos")]
    fn get_dynamiclib_name(name: &str) -> String {
        format!("lib{}.dylib", name)
    }
    /// Formats dll name on *nix ("test_foo" -> "libtest_foo.so")
    #[cfg(any(target_os="linux",
              target_os="freebsd",
              target_os="dragonfly",
              target_os="netbsd",
              target_os="openbsd"))]
    fn get_dynamiclib_name(name: &str) -> String {
        format!("lib{}.so", name)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::process::Command;
    use std::sync::mpsc::channel;
    use std::path::{Path, PathBuf};
    use std::env;
    use std::thread;
    use std::time::Duration;
    use std::rc::Rc;
    use std::fs;
    // Records which phases of the reload callback were observed.
    #[derive(Default)]
    struct TestNotifyCallback {
        // Set when the callback fires with before == true (pre-unload).
        update_call_done: bool,
        // Set when the callback fires with before == false (post-reload).
        after_update_done: bool,
    }
    impl TestNotifyCallback {
        // Callback passed to update_with_callback; flips the flag that
        // matches the reported phase.
        fn update_call(&mut self, before: bool, _lib: &Rc<Lib>) {
            if before {
                self.update_call_done = true;
            } else {
                self.after_update_done = true;
            }
        }
    }
    // Compiles src/test_shared.rs into a dylib next to the test
    // executable and returns its full path.
    fn compile_test_shared_lib() -> PathBuf {
        let exe_path = env::current_exe().unwrap();
        let lib_path = exe_path.parent().unwrap();
        let lib_name = "test_shared";
        let lib_full_path = Path::new(&lib_path).join(DynamicReload::get_dynamiclib_name(lib_name));
        // Only run if file doesn't exist
        if DynamicReload::is_file(&lib_full_path).is_none() {
            Command::new("rustc")
                .arg("src/test_shared.rs")
                .arg("--crate-name")
                .arg(&lib_name)
                .arg("--crate-type")
                .arg("dylib")
                .arg("--out-dir")
                .arg(&lib_path)
                .output()
                .unwrap_or_else(|e| panic!("failed to execute process: {}", e));
        }
        lib_full_path
    }
    // No search paths configured -> empty list.
    #[test]
    fn test_search_paths_none() {
        assert_eq!(DynamicReload::get_search_paths(None).len(), 0);
    }
    // Provided paths are passed through unchanged (duplicates kept).
    #[test]
    fn test_search_paths_some() {
        assert_eq!(DynamicReload::get_search_paths(Some(vec!["test", "test"])).len(),
                   2);
    }
    #[test]
    fn test_get_watcher() {
        let (tx, _) = channel();
        // We expect this to always work
        assert!(DynamicReload::get_watcher(tx).is_some());
    }
    // A nonexistent parent directory means no temp dir can be created.
    #[test]
    fn test_get_temp_dir_fail() {
        assert!(DynamicReload::get_temp_dir(Some("_no_such_dir")).is_none());
    }
    #[test]
    fn test_get_temp_dir_none() {
        assert!(DynamicReload::get_temp_dir(None).is_none());
    }
    // Empty string = current directory, which should be writable.
    #[test]
    fn test_get_temp_dir_ok() {
        assert!(DynamicReload::get_temp_dir(Some("")).is_some());
    }
    #[test]
    fn test_is_file_fail() {
        assert!(DynamicReload::is_file(&Path::new("haz_no_file_with_this_name").to_path_buf())
                    .is_none());
    }
    // The running test executable is guaranteed to exist.
    #[test]
    fn test_is_file_ok() {
        assert!(DynamicReload::is_file(&env::current_exe().unwrap()).is_some());
    }
    #[test]
    #[cfg(target_os="macos")]
    fn test_get_library_name_mac() {
        assert_eq!(DynamicReload::get_library_name("foobar", UsePlatformName::Yes),
                   "libfoobar.dylib");
    }
    // UsePlatformName::No leaves the name untouched.
    #[test]
    fn test_get_library_name() {
        assert_eq!(DynamicReload::get_library_name("foobar", UsePlatformName::No),
                   "foobar");
    }
    #[test]
    fn test_search_backwards_from_file_ok() {
        // While this rely on that we have a Cargo project it should be fine
        assert!(DynamicReload::search_backwards_from_exe(&"Cargo.toml".to_string()).is_some());
    }
    #[test]
    fn test_search_backwards_from_file_fail() {
        assert!(DynamicReload::search_backwards_from_exe(&"_no_such_file".to_string()).is_none());
    }
    #[test]
    fn test_add_library_fail() {
        let mut dr = DynamicReload::new(None, None, Search::Default);
        assert!(dr.add_library("wont_find_this_lib", UsePlatformName::No).is_err());
    }
    // Load the freshly compiled dylib without a shadow dir.
    #[test]
    fn test_add_shared_lib_ok() {
        compile_test_shared_lib();
        let mut dr = DynamicReload::new(None, None, Search::Default);
        assert!(dr.add_library("test_shared", UsePlatformName::Yes).is_ok());
    }
    // Same, but resolution goes through configured search paths.
    #[test]
    fn test_add_shared_lib_search_paths() {
        compile_test_shared_lib();
        let mut dr = DynamicReload::new(Some(vec!["../..", "../test"]), None, Search::Default);
        assert!(dr.add_library("test_shared", UsePlatformName::Yes).is_ok());
    }
    // Same, but loading goes through a shadow copy in target/debug.
    #[test]
    fn test_add_shared_shadow_dir_ok() {
        compile_test_shared_lib();
        let mut dr = DynamicReload::new(None, Some("target/debug"), Search::Default);
        assert!(dr.add_library("test_shared", UsePlatformName::Yes).is_ok());
    }
    // End-to-end reload: overwrite the watched dylib mid-loop and check
    // that both callback phases (pre-unload and post-reload) fired.
    #[test]
    fn test_add_shared_update_1() {
        let mut notify_callback = TestNotifyCallback::default();
        let target_path = compile_test_shared_lib();
        let mut dest_path = Path::new(&target_path).to_path_buf();
        let mut dr = DynamicReload::new(None, Some("target/debug"), Search::Default);
        // Keep a spare copy so we can "touch" the watched file below.
        dest_path.set_file_name("test_file");
        fs::copy(&target_path, &dest_path).unwrap();
        assert!(dr.add_library("test_shared", UsePlatformName::Yes).is_ok());
        for i in 0..10 {
            dr.update_with_callback(TestNotifyCallback::update_call, &mut notify_callback);
            // Overwrite the watched library on the third iteration to
            // trigger a change event.
            if i == 2 {
                fs::copy(&dest_path, &target_path).unwrap();
            }
            thread::sleep(Duration::from_millis(50));
        }
        assert!(notify_callback.update_call_done);
        assert!(notify_callback.after_update_done);
    }
}
|
//! Implementation of the Micro Transport Protocol.[^spec]
//!
//! [^spec]: http://www.bittorrent.org/beps/bep_0029.html
// __________ ____ ____
// /_ __/ __ \/ __ \/ __ \
// / / / / / / / / / / / /
// / / / /_/ / /_/ / /_/ /
// /_/ \____/_____/\____/
//
// - Lossy UDP socket for testing purposes: send and receive ops are wrappers
// that stochastically drop or reorder packets.
// - Sending FIN on drop
// - Handle packet loss
// - Path MTU discovery (RFC4821)
#![feature(macro_rules)]
#![feature(phase)]
#![feature(if_let)]
#![feature(while_let)]
#![feature(globs)]
#![deny(missing_doc)]
extern crate time;
#[phase(plugin, link)] extern crate log;
use std::io::net::udp::UdpSocket;
use std::io::net::ip::SocketAddr;
use std::io::IoResult;
use std::rand::random;
use std::collections::{DList, Deque};
use util::*;
use packet::*;
mod util;
mod bit_iterator;
mod packet;
// For simplicity's sake, let us assume no packet will ever exceed the
// Ethernet maximum transfer unit of 1500 bytes.
const BUF_SIZE: uint = 1500;
// Maximum age of a delay sample kept in the delay windows (2 min, in microseconds).
const DELAY_MAX_AGE: u32 = 2 * 60 * 1_000_000;
// LEDBAT gain applied to congestion-window adjustments.
const GAIN: uint = 1;
// Max multiple of MSS the congestion window may exceed flightsize by.
const ALLOWED_INCREASE: uint = 1;
const TARGET: uint = 100_000; // 100 milliseconds
// Maximum segment size: bytes per packet (payload plus header budget).
const MSS: uint = 1400;
// Congestion-window bounds, in multiples of MSS.
const MIN_CWND: uint = 2;
const INIT_CWND: uint = 2;
// Unwrap an IoResult, panicking with the error's message on failure.
// Used where an I/O error is considered fatal (mostly tests and internals).
macro_rules! iotry(
($e:expr) => (match $e { Ok(e) => e, Err(e) => panic!("{}", e) })
)
// States of the uTP connection state machine.
#[deriving(PartialEq,Eq,Show)]
enum UtpSocketState {
SocketNew, // bound, no traffic exchanged yet
SocketConnected, // handshake completed, data may flow
SocketSynSent, // SYN sent, awaiting the peer's STATE reply
SocketFinReceived, // peer sent FIN; still draining in-flight packets
SocketFinSent, // our FIN sent, awaiting its acknowledgement
SocketResetReceived, // peer aborted the connection with a RESET
SocketClosed, // connection fully shut down
SocketEndOfFile, // FIN fully processed; next read reports EndOfFile
}
/// A uTP (Micro Transport Protocol) socket.
pub struct UtpSocket {
socket: UdpSocket, // underlying UDP socket carrying all traffic
connected_to: SocketAddr, // remote peer's address
sender_connection_id: u16, // connection id stamped on outgoing packets
receiver_connection_id: u16, // connection id expected on incoming packets
seq_nr: u16, // sequence number of the next packet we send
ack_nr: u16, // seq nr of the last in-order packet received
state: UtpSocketState,
// Received but not acknowledged packets
incoming_buffer: Vec<UtpPacket>,
// Sent but not yet acknowledged packets
send_window: Vec<UtpPacket>,
unsent_queue: DList<UtpPacket>, // queued by send_to, not yet transmitted
duplicate_ack_count: uint, // consecutive ACKs of the same nr (3 => fast resend)
last_acked: u16, // last sequence number the peer acknowledged
last_acked_timestamp: u32, // when that acknowledgement arrived (microseconds)
fin_seq_nr: u16, // sequence number carried by a received FIN, if any
rtt: int, // smoothed round-trip-time estimate (milliseconds)
rtt_variance: int, // RTT variance estimate
pending_data: Vec<u8>, // remainder of a payload that didn't fit a reader's buffer
curr_window: uint, // bytes currently in flight
remote_wnd_size: uint, // peer's advertised receive window (bytes)
current_delays: Vec<(u32,u32)>, // (timestamp, delay) samples for the current-delay filter
base_delays: Vec<(u32,u32)>, // (timestamp, delay) samples for the base-delay window
congestion_timeout: u32, // retransmission/congestion timeout (milliseconds)
cwnd: uint, // congestion window (bytes)
}
impl UtpSocket {
/// Create a UTP socket from the given address.
///
/// A random connection id `id` is drawn: incoming packets are expected to
/// carry `id`, outgoing packets carry `id + 1`.
#[unstable]
pub fn bind(addr: SocketAddr) -> IoResult<UtpSocket> {
let skt = UdpSocket::bind(addr);
let connection_id = random::<u16>();
match skt {
Ok(x) => Ok(UtpSocket {
socket: x,
connected_to: addr, // placeholder; fixed up by connect() or a received SYN
receiver_connection_id: connection_id,
sender_connection_id: connection_id + 1,
seq_nr: 1,
ack_nr: 0,
state: SocketNew,
incoming_buffer: Vec::new(),
send_window: Vec::new(),
unsent_queue: DList::new(),
duplicate_ack_count: 0,
last_acked: 0,
last_acked_timestamp: 0,
fin_seq_nr: 0,
rtt: 0,
rtt_variance: 0,
pending_data: Vec::new(),
curr_window: 0,
remote_wnd_size: 0,
current_delays: Vec::new(),
base_delays: Vec::new(),
congestion_timeout: 1000, // 1 second
cwnd: INIT_CWND * MSS,
}),
Err(e) => Err(e)
}
}
/// Open a uTP connection to a remote host by hostname or IP address.
///
/// Sends a SYN, retrying up to 5 times with a 500 ms read timeout, then
/// requires the reply to be a STATE packet before marking the socket
/// connected.
#[unstable]
pub fn connect(mut self, other: SocketAddr) -> IoResult<UtpSocket> {
use std::io::{IoError, ConnectionFailed};
self.connected_to = other;
// bind() established this invariant: sender id == receiver id + 1
assert_eq!(self.receiver_connection_id + 1, self.sender_connection_id);
let mut packet = UtpPacket::new();
packet.set_type(SynPacket);
packet.set_connection_id(self.receiver_connection_id);
packet.set_seq_nr(self.seq_nr);
let mut len = 0;
let mut addr = self.connected_to;
let mut buf = [0, ..BUF_SIZE];
for _ in range(0u, 5) {
// Refresh the timestamp on every retry
packet.set_timestamp_microseconds(now_microseconds());
// Send packet
try!(self.socket.send_to(packet.bytes().as_slice(), other));
self.state = SocketSynSent;
// Validate response
self.socket.set_read_timeout(Some(500));
match self.socket.recv_from(buf) {
Ok((read, src)) => { len = read; addr = src; break; },
Err(ref e) if e.kind == std::io::TimedOut => continue,
Err(e) => panic!("{}", e),
};
}
// NOTE(review): if all 5 attempts time out, len is still 0 and this
// assert panics instead of returning a timeout error — confirm intent.
assert!(len == HEADER_SIZE);
assert!(addr == self.connected_to);
let packet = UtpPacket::decode(buf.slice_to(len));
if packet.get_type() != StatePacket {
return Err(IoError {
kind: ConnectionFailed,
desc: "The remote peer sent an invalid reply",
detail: None,
});
}
// Adopt the peer's sequence number and advance our own past the SYN
self.ack_nr = packet.seq_nr();
self.state = SocketConnected;
self.seq_nr += 1;
debug!("connected to: {}", self.connected_to);
return Ok(self);
}
/// Gracefully close connection to peer.
///
/// This method allows both peers to receive all packets still in
/// flight.
#[unstable]
pub fn close(&mut self) -> IoResult<()> {
// Wait for acknowledgment on pending sent packets
let mut buf = [0u8, ..BUF_SIZE];
while !self.send_window.is_empty() {
iotry!(self.recv_from(buf));
}
let mut packet = UtpPacket::new();
packet.set_connection_id(self.sender_connection_id);
packet.set_seq_nr(self.seq_nr);
packet.set_ack_nr(self.ack_nr);
packet.set_timestamp_microseconds(now_microseconds());
packet.set_type(FinPacket);
// Send FIN
try!(self.socket.send_to(packet.bytes().as_slice(), self.connected_to));
self.state = SocketFinSent;
// Consume replies until handle_packet observes the FIN being
// acknowledged (state becomes SocketClosed), or the peer signals EOF
while self.state != SocketClosed {
match self.recv_from(buf) {
Ok(_) => {},
Err(ref e) if e.kind == std::io::EndOfFile => self.state = SocketClosed,
Err(e) => panic!("{}", e),
};
}
Ok(())
}
/// Receive data from socket.
///
/// On success, returns the number of bytes read and the sender's address.
/// Returns SocketEndOfFile after receiving a FIN packet when the remaining
/// inflight packets are consumed. Subsequent calls return SocketClosed.
#[unstable]
pub fn recv_from(&mut self, buf: &mut[u8]) -> IoResult<(uint,SocketAddr)> {
use std::io::{IoError, EndOfFile, Closed};
// First read after EOF flips the state to closed and reports EndOfFile
if self.state == SocketEndOfFile {
self.state = SocketClosed;
return Err(IoError {
kind: EndOfFile,
desc: "End of file reached",
detail: None,
});
}
// Any read after that reports Closed
if self.state == SocketClosed {
return Err(IoError {
kind: Closed,
desc: "Connection closed",
detail: None,
});
}
// Serve already-buffered, in-order data before touching the network
match self.flush_incoming_buffer(buf) {
0 => self.recv(buf),
read => Ok((read, self.connected_to)),
}
}
/// Read one datagram from the UDP socket, feed it through `handle_packet`,
/// send any reply produced, and flush newly in-order payload into `buf`.
///
/// A read timeout is treated as congestion: the timeout doubles, the
/// congestion window collapses to one MSS, and a fast-resend request is
/// sent; the call then returns Ok((0, ..)) instead of an error.
fn recv(&mut self, buf: &mut[u8]) -> IoResult<(uint,SocketAddr)> {
use std::io::{IoError, TimedOut, ConnectionReset};
let mut b = [0, ..BUF_SIZE + HEADER_SIZE];
// No timeout while waiting for the very first (SYN) packet
if self.state != SocketNew {
debug!("setting read timeout of {} ms", self.congestion_timeout);
self.socket.set_read_timeout(Some(self.congestion_timeout as u64));
}
let (read, src) = match self.socket.recv_from(b) {
Err(ref e) if e.kind == TimedOut => {
debug!("recv_from timed out");
self.congestion_timeout = self.congestion_timeout * 2;
self.cwnd = MSS;
self.send_fast_resend_request();
return Ok((0, self.connected_to));
},
Ok(x) => x,
Err(e) => return Err(e),
};
let packet = UtpPacket::decode(b.slice_to(read));
debug!("received {}", packet);
if packet.get_type() == ResetPacket {
return Err(IoError {
kind: ConnectionReset,
desc: "Remote host aborted connection (incorrect connection id)",
detail: None,
});
}
// A SYN tells us who we are talking to
if packet.get_type() == SynPacket {
self.connected_to = src;
}
let shallow_clone = packet.shallow_clone();
// Stash data packets at or past the next expected sequence number.
// NOTE(review): plain u16 comparison — sequence-number wraparound at
// 65535 is not handled here; confirm.
if packet.get_type() == DataPacket && self.ack_nr + 1 <= packet.seq_nr() {
self.insert_into_buffer(packet);
}
if let Some(pkt) = self.handle_packet(shallow_clone) {
let mut pkt = pkt;
pkt.set_wnd_size(BUF_SIZE as u32);
try!(self.socket.send_to(pkt.bytes().as_slice(), src));
debug!("sent {}", pkt);
}
// Flush incoming buffer if possible
let read = self.flush_incoming_buffer(buf);
Ok((read, src))
}
#[allow(missing_doc)]
#[deprecated = "renamed to `recv_from`"]
// Backwards-compatible alias; forwards to recv_from unchanged.
pub fn recvfrom(&mut self, buf: &mut[u8]) -> IoResult<(uint,SocketAddr)> {
self.recv_from(buf)
}
/// Assemble a reply packet of the given type, carrying this socket's
/// connection id, sequence/acknowledgement numbers, the current timestamp,
/// and the measured timestamp difference to the original packet.
fn prepare_reply(&self, original: &UtpPacket, t: UtpPacketType) -> UtpPacket {
    let mut reply = UtpPacket::new();
    let now: u32 = now_microseconds();
    let their_timestamp: u32 = original.timestamp_microseconds();
    reply.set_type(t);
    reply.set_connection_id(self.sender_connection_id);
    reply.set_seq_nr(self.seq_nr);
    reply.set_ack_nr(self.ack_nr);
    reply.set_timestamp_microseconds(now);
    reply.set_timestamp_difference_microseconds((now - their_timestamp));
    reply
}
/// Pop the first packet off the incoming buffer, advancing `ack_nr` to its
/// sequence number. Returns `None` when the buffer is empty.
fn advance_incoming_buffer(&mut self) -> Option<UtpPacket> {
    // In this Rust version, Vec::remove returns Option
    if let Some(packet) = self.incoming_buffer.remove(0) {
        debug!("Removed packet from incoming buffer: {}", packet);
        self.ack_nr = packet.seq_nr();
        Some(packet)
    } else {
        None
    }
}
/// Discards sequential, ordered packets in incoming buffer, starting from
/// the most recently acknowledged to the most recent, as long as there are
/// no missing packets. The discarded packets' payload is written to the
/// slice `buf`, starting in position `start`.
/// Returns the last written index.
fn flush_incoming_buffer(&mut self, buf: &mut [u8]) -> uint {
let mut idx = 0;
// Check if there is any pending data from a partially flushed packet
if !self.pending_data.is_empty() {
// clone_from_slice copies min(buf.len(), pending.len()) bytes and returns the count
let len = buf.clone_from_slice(self.pending_data.as_slice());
// If all the data in the pending data buffer fits the given output
// buffer, remove the corresponding packet from the incoming buffer
// and clear the pending data buffer
if len == self.pending_data.len() {
self.pending_data.clear();
self.advance_incoming_buffer();
// idx is still 0 here, so this returns exactly len
return idx + len;
} else {
// Remove the bytes copied to the output buffer from the pending
// data buffer (i.e., pending -= output)
// NOTE(review): this branch does NOT return, yet buf is already
// full and idx is still 0 — the loop below would overwrite the
// bytes just copied and re-copy the packet's payload from offset
// 0. Looks like a bug; confirm whether this path is reachable
// (internal callers pass BUF_SIZE-sized buffers).
self.pending_data = self.pending_data.slice_from(len).to_vec();
}
}
// Copy the payload of as many packets in the incoming buffer as possible
while !self.incoming_buffer.is_empty() &&
(self.ack_nr == self.incoming_buffer[0].seq_nr() ||
self.ack_nr + 1 == self.incoming_buffer[0].seq_nr())
{
let len = std::cmp::min(buf.len() - idx, self.incoming_buffer[0].payload.len());
for i in range(0, len) {
buf[idx] = self.incoming_buffer[0].payload[i];
idx += 1;
}
// Remove top packet if its payload fits the output buffer
if self.incoming_buffer[0].payload.len() == len {
self.advance_incoming_buffer();
} else {
// Partial copy: stash the rest for the next call
self.pending_data.push_all(self.incoming_buffer[0].payload.slice_from(len));
}
// Stop if the output buffer is full
if buf.len() == idx {
return idx;
}
}
return idx;
}
/// Send data on socket to the remote peer. Returns nothing on success.
//
// # Implementation details
//
// This method inserts packets into the send buffer and keeps trying to
// advance the send window until an ACK corresponding to the last packet is
// received.
//
// Note that the buffer passed to `send_to` might exceed the maximum packet
// size, which will result in the data being split over several packets.
#[unstable]
pub fn send_to(&mut self, buf: &[u8]) -> IoResult<()> {
use std::io::{IoError, Closed};
if self.state == SocketClosed {
return Err(IoError {
kind: Closed,
desc: "Connection closed",
detail: None,
});
}
// Split the payload into packets of at most MSS - HEADER_SIZE bytes
for chunk in buf.chunks(MSS - HEADER_SIZE) {
let mut packet = UtpPacket::new();
packet.set_type(DataPacket);
packet.payload = chunk.to_vec();
packet.set_timestamp_microseconds(now_microseconds());
packet.set_seq_nr(self.seq_nr);
packet.set_ack_nr(self.ack_nr);
packet.set_connection_id(self.sender_connection_id);
self.unsent_queue.push(packet);
self.seq_nr += 1;
}
// Flush unsent packet queue
self.send();
// Consume acknowledgements until latest packet
// (blocks until the peer has acked seq_nr - 1, the last packet queued)
let mut buf = [0, ..BUF_SIZE];
while self.last_acked < self.seq_nr - 1 {
try!(self.recv_from(buf));
}
Ok(())
}
/// Send every packet in the unsent packet queue.
///
/// Before each transmission, blocks (by consuming acknowledgements) while
/// the packet would push the in-flight byte count over the allowed limit.
fn send(&mut self) {
let dst = self.connected_to;
while let Some(packet) = self.unsent_queue.pop_front() {
debug!("current window: {}", self.send_window.len());
// Limit is min(cwnd, peer's advertised window), floored at MIN_CWND * MSS
let max_inflight = std::cmp::min(self.cwnd, self.remote_wnd_size);
let max_inflight = std::cmp::max(MIN_CWND * MSS, max_inflight);
while self.curr_window + packet.len() > max_inflight {
let mut buf = [0, ..BUF_SIZE];
iotry!(self.recv_from(buf));
}
iotry!(self.socket.send_to(packet.bytes().as_slice(), dst));
debug!("sent {}", packet);
self.curr_window += packet.len();
self.send_window.push(packet);
}
}
#[allow(missing_doc)]
#[deprecated = "renamed to `send_to`"]
// Backwards-compatible alias; forwards to send_to unchanged.
pub fn sendto(&mut self, buf: &[u8]) -> IoResult<()> {
self.send_to(buf)
}
/// Send fast resend request.
///
/// Sends three identical ACK/STATE packets to the remote host, signalling a
/// fast resend request.
fn send_fast_resend_request(&mut self) {
let mut packet = UtpPacket::new();
packet.set_wnd_size(BUF_SIZE as u32);
packet.set_type(StatePacket);
packet.set_ack_nr(self.ack_nr);
packet.set_seq_nr(self.seq_nr);
packet.set_connection_id(self.sender_connection_id);
for _ in range(0u, 3) {
// Refresh the timestamps on each copy
let t = now_microseconds();
packet.set_timestamp_microseconds(t);
packet.set_timestamp_difference_microseconds((t - self.last_acked_timestamp));
iotry!(self.socket.send_to(packet.bytes().as_slice(), self.connected_to));
debug!("sent {}", packet);
}
}
/// Record a base-delay measurement, first evicting samples older than
/// `DELAY_MAX_AGE` (2 minutes).
fn update_base_delay(&mut self, v: u32) {
    // Remove measurements more than 2 minutes old
    let now = now_microseconds();
    while !self.base_delays.is_empty() && now - self.base_delays[0].val0() > DELAY_MAX_AGE {
        self.base_delays.remove(0);
    }
    // Insert new measurement, stamped with the same instant used for
    // eviction. (Previously now_microseconds() was called a second time,
    // so the stored timestamp could disagree with the eviction cut-off.)
    self.base_delays.push((now, v));
}
/// Record a current-delay measurement, first evicting samples older than
/// `DELAY_MAX_AGE` (2 minutes).
fn update_current_delay(&mut self, v: u32) {
    // Remove measurements more than 2 minutes old
    let now = now_microseconds();
    while !self.current_delays.is_empty() && now - self.current_delays[0].val0() > DELAY_MAX_AGE {
        self.current_delays.remove(0);
    }
    // Insert new measurement, stamped with the same instant used for
    // eviction. (Previously now_microseconds() was called a second time,
    // so the stored timestamp could disagree with the eviction cut-off.)
    self.current_delays.push((now, v));
}
/// Fold a new delay sample into the smoothed RTT and RTT-variance
/// estimates (RFC 6298-style filter) and recompute the congestion
/// timeout, clamped to the range [500, 60000] milliseconds.
fn update_congestion_timeout(&mut self, current_delay: int) {
    let delta = self.rtt - current_delay;
    self.rtt_variance += (std::num::abs(delta) - self.rtt_variance) / 4;
    self.rtt += (current_delay - self.rtt) / 8;
    let timeout = std::cmp::max(self.rtt + self.rtt_variance * 4, 500) as u32;
    self.congestion_timeout = std::cmp::min(timeout, 60_000);
    debug!("current_delay: {}", current_delay);
    debug!("delta: {}", delta);
    debug!("self.rtt_variance: {}", self.rtt_variance);
    debug!("self.rtt: {}", self.rtt);
    debug!("self.congestion_timeout: {}", self.congestion_timeout);
}
/// Calculate the filtered current delay in the current window.
///
/// The current delay is calculated through application of the exponential
/// weighted moving average filter with smoothing factor 0.333 over the
/// current delays in the current window.
fn filtered_current_delay(&self) -> u32 {
let input = self.current_delays.iter().map(|&(_,x)| x as f64).collect();
let output = exponential_weighted_moving_average(input, 0.333);
// NOTE(review): output[len - 1] panics if current_delays is empty; the
// caller records a sample first, but confirm no other path reaches here.
output[output.len() - 1] as u32
}
/// Calculate the lowest base delay in the current window.
///
/// Fix: the previous version took `min()` over the `(timestamp, delay)`
/// tuples, which compares by timestamp first and therefore returned the
/// delay of the *oldest* sample, not the smallest delay — breaking the
/// LEDBAT queuing-delay estimate. Project the delay out before `min()`.
fn min_base_delay(&self) -> u32 {
    self.base_delays.iter().map(|&(_, delay)| delay).min().unwrap()
}
/// Build the selective acknowledgment payload for usage in packets.
///
/// Bit `k` of the bitfield flags packet `ack_nr + 2 + k` as received. The
/// incoming buffer is ordered by sequence number (see insert_into_buffer),
/// so byte indices computed below are non-decreasing.
fn build_selective_ack(&self) -> Vec<u8> {
    let mut stashed = self.incoming_buffer.iter()
        .filter(|&pkt| pkt.seq_nr() > self.ack_nr);
    let mut sack = Vec::new();
    for packet in stashed {
        let diff = packet.seq_nr() - self.ack_nr - 2;
        let byte = (diff / 8) as uint;
        let bit = (diff % 8) as uint;
        // Grow the bitfield until it covers this packet's byte. Fix: the
        // previous `if` pushed at most ONE zero byte, so a sequence gap
        // wider than 8 made `byte` exceed the vector length and the bit
        // was set in the wrong (last) byte.
        while byte >= sack.len() {
            sack.push(0u8);
        }
        // `byte` is now the last index, so pop/set/push targets it
        let mut bitarray = sack.pop().unwrap();
        bitarray |= 1 << bit;
        sack.push(bitarray);
    }
    // Make sure the amount of elements in the SACK vector is a
    // multiple of 4
    if sack.len() % 4 != 0 {
        let len = sack.len();
        sack.grow((len / 4 + 1) * 4 - len, 0);
    }
    return sack;
}
/// Retransmit the packet with sequence number `lost_packet_nr`, provided
/// it is still present in the send window.
fn resend_lost_packet(&mut self, lost_packet_nr: u16) {
    let found = self.send_window.iter().find(|pkt| pkt.seq_nr() == lost_packet_nr);
    match found {
        Some(packet) => {
            iotry!(self.socket.send_to(packet.bytes().as_slice(), self.connected_to));
            debug!("sent {}", packet);
        }
        None => debug!("Packet {} not found", lost_packet_nr),
    }
}
/// Forget sent packets that were acknowledged by the remote peer.
///
/// Drops every packet up to and including the one whose sequence number
/// equals `last_acked`, shrinking the in-flight byte count accordingly.
fn advance_send_window(&mut self) {
if let Some(position) = self.send_window.iter()
.position(|pkt| pkt.seq_nr() == self.last_acked)
{
// remove(0) returns Option in this Rust version; unwrap is safe
// because position < send_window.len()
for _ in range(0, position + 1) {
let packet = self.send_window.remove(0).unwrap();
self.curr_window -= packet.len();
}
}
debug!("self.curr_window: {}", self.curr_window);
}
/// Handle incoming packet, updating socket state accordingly.
///
/// Returns appropriate reply packet, if needed.
///
/// NOTE(review): all sequence-number comparisons here use plain `u16`
/// arithmetic; wraparound at 65535 is not handled — confirm whether
/// connections long enough to wrap are expected.
fn handle_packet(&mut self, packet: UtpPacket) -> Option<UtpPacket> {
// Reset connection if connection id doesn't match and this isn't a SYN
if packet.get_type() != SynPacket &&
!(packet.connection_id() == self.sender_connection_id ||
packet.connection_id() == self.receiver_connection_id) {
return Some(self.prepare_reply(&packet, ResetPacket));
}
// Acknowledge only if the packet strictly follows the previous one
if self.ack_nr + 1 == packet.seq_nr() {
self.ack_nr = packet.seq_nr();
}
self.remote_wnd_size = packet.wnd_size() as uint;
debug!("self.remote_wnd_size: {}", self.remote_wnd_size);
// Ignore packets with sequence number higher than the one in the FIN packet.
if self.state == SocketFinReceived && self.fin_seq_nr == self.ack_nr &&
packet.seq_nr() > self.fin_seq_nr
{
debug!("Ignoring packet with sequence number {} (higher than FIN: {})",
packet.seq_nr(), self.fin_seq_nr);
return None;
}
match packet.get_type() {
SynPacket => { // Respond with an ACK and populate own fields
// Update socket information for new connections
self.ack_nr = packet.seq_nr();
self.seq_nr = random();
self.receiver_connection_id = packet.connection_id() + 1;
self.sender_connection_id = packet.connection_id();
self.state = SocketConnected;
Some(self.prepare_reply(&packet, StatePacket))
}
DataPacket => {
let mut reply = self.prepare_reply(&packet, StatePacket);
if self.ack_nr + 1 < packet.seq_nr() {
debug!("current ack_nr ({}) is behind received packet seq_nr ({})",
self.ack_nr, packet.seq_nr());
// Set SACK extension payload if the packet is not in order
let sack = self.build_selective_ack();
if sack.len() > 0 {
reply.set_sack(Some(sack));
}
}
Some(reply)
},
FinPacket => {
self.state = SocketFinReceived;
self.fin_seq_nr = packet.seq_nr();
// If all packets are received and handled
if self.pending_data.is_empty() &&
self.incoming_buffer.is_empty() &&
self.ack_nr == self.fin_seq_nr
{
self.state = SocketEndOfFile;
Some(self.prepare_reply(&packet, StatePacket))
} else {
debug!("FIN received but there are missing packets");
None
}
}
StatePacket => {
// Track duplicate acknowledgements for fast-resend detection
if packet.ack_nr() == self.last_acked {
self.duplicate_ack_count += 1;
} else {
self.last_acked = packet.ack_nr();
self.last_acked_timestamp = now_microseconds();
self.duplicate_ack_count = 1;
}
// LEDBAT-style congestion window adjustment
self.update_base_delay(packet.timestamp_microseconds());
self.update_current_delay(packet.timestamp_difference_microseconds());
let bytes_newly_acked = packet.len();
let flightsize = self.curr_window;
let queuing_delay = self.filtered_current_delay() - self.min_base_delay();
let target = TARGET as u32;
// NOTE(review): u32 arithmetic — (target - queuing_delay) underflows
// when queuing_delay > target, and the integer division truncates
// off_target to 0 for any positive queuing_delay; LEDBAT's off_target
// is a signed fraction. Verify this whole adjustment.
let off_target: u32 = (target - queuing_delay) / target;
self.cwnd += GAIN * off_target as uint * bytes_newly_acked * MSS / self.cwnd;
let max_allowed_cwnd = flightsize + ALLOWED_INCREASE * MSS;
self.cwnd = std::cmp::min(self.cwnd, max_allowed_cwnd);
self.cwnd = std::cmp::max(self.cwnd, MIN_CWND * MSS);
// NOTE(review): this "rtt" is derived from target/off_target, not a
// measured round-trip time — looks suspicious; confirm intent.
let rtt = (target - off_target) / 1000; // in milliseconds
self.update_congestion_timeout(rtt as int);
debug!("queuing_delay: {}", queuing_delay);
debug!("off_target: {}", off_target);
debug!("cwnd: {}", self.cwnd);
debug!("max_allowed_cwnd: {}", max_allowed_cwnd);
let mut packet_loss_detected: bool = !self.send_window.is_empty() &&
self.duplicate_ack_count == 3;
// Process extensions, if any
for extension in packet.extensions.iter() {
if extension.get_type() == SelectiveAckExtension {
let bits = extension.iter();
// If three or more packets are acknowledged past the implicit missing one,
// assume it was lost.
if bits.filter(|&bit| bit == 1).count() >= 3 {
self.resend_lost_packet(packet.ack_nr() + 1);
packet_loss_detected = true;
}
// Walk the SACK bitfield: bit k covers packet ack_nr + 2 + k
let bits = extension.iter();
for (idx, received) in bits.map(|bit| bit == 1).enumerate() {
let seq_nr = packet.ack_nr() + 2 + idx as u16;
if received {
debug!("SACK: packet {} received", seq_nr);
} else if !self.send_window.is_empty() &&
seq_nr < self.send_window.last().unwrap().seq_nr()
{
debug!("SACK: packet {} lost", seq_nr);
self.resend_lost_packet(seq_nr);
packet_loss_detected = true;
} else {
break;
}
}
} else {
debug!("Unknown extension {}, ignoring", extension.get_type());
}
}
// Packet lost, halve the congestion window
if packet_loss_detected {
debug!("packet loss detected, halving congestion window");
self.cwnd = std::cmp::max(self.cwnd / 2, MIN_CWND * MSS);
debug!("cwnd: {}", self.cwnd);
}
// Three duplicate ACKs, must resend packets since `ack_nr + 1`
// TODO: checking if the send buffer isn't empty isn't a
// foolproof way to differentiate between triple-ACK and three
// keep alives spread in time
if !self.send_window.is_empty() && self.duplicate_ack_count == 3 {
for i in range(0, self.send_window.len()) {
let seq_nr = self.send_window[i].seq_nr();
if seq_nr <= packet.ack_nr() { continue; }
self.resend_lost_packet(seq_nr);
}
}
// Success, advance send window
self.advance_send_window();
// Our FIN was acknowledged: the connection is fully closed
if self.state == SocketFinSent && packet.ack_nr() == self.seq_nr {
self.state = SocketClosed;
}
None
},
ResetPacket => {
self.state = SocketResetReceived;
None
},
}
}
/// Insert a packet into the socket's buffer.
///
/// The packet is inserted in such a way that the buffer is
/// ordered ascendingly by their sequence number. This allows
/// storing packets that were received out of order.
///
/// Inserting a duplicate of a packet replaces the one already in the
/// buffer unconditionally. (The previous doc claimed timestamps were
/// compared, but no such comparison is performed here.)
fn insert_into_buffer(&mut self, packet: UtpPacket) {
// Find the first position holding a packet with seq nr >= the new one
let mut i = 0;
for pkt in self.incoming_buffer.iter() {
if pkt.seq_nr() >= packet.seq_nr() {
break;
}
i += 1;
}
// Drop an existing packet with the same sequence number before inserting
if !self.incoming_buffer.is_empty() && i < self.incoming_buffer.len() &&
self.incoming_buffer[i].seq_nr() == packet.seq_nr() {
self.incoming_buffer.remove(i);
}
self.incoming_buffer.insert(i, packet);
}
}
/// Stream interface for UtpSocket.
pub struct UtpStream {
socket: UtpSocket, // underlying datagram-oriented uTP socket
}
impl UtpStream {
/// Create a uTP stream listening on the given address.
#[unstable]
pub fn bind(addr: SocketAddr) -> IoResult<UtpStream> {
let socket = UtpSocket::bind(addr);
match socket {
Ok(s) => Ok(UtpStream { socket: s }),
Err(e) => Err(e),
}
}
/// Open a uTP connection to a remote host by hostname or IP address.
#[unstable]
pub fn connect(dst: SocketAddr) -> IoResult<UtpStream> {
use std::io::net::ip::Ipv4Addr;
// Port 0 means the operating system gets to choose it
let my_addr = SocketAddr { ip: Ipv4Addr(0,0,0,0), port: 0 };
let socket = match UtpSocket::bind(my_addr) {
Ok(s) => s,
Err(e) => return Err(e),
};
match socket.connect(dst) {
Ok(socket) => Ok(UtpStream { socket: socket }),
Err(e) => Err(e),
}
}
/// Gracefully close connection to peer.
///
/// This method allows both peers to receive all packets still in
/// flight.
#[unstable]
pub fn close(&mut self) -> IoResult<()> {
self.socket.close()
}
}
impl Reader for UtpStream {
    /// Read from the underlying socket, discarding the sender's address.
    fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {
        self.socket.recv_from(buf).map(|(nread, _src)| nread)
    }
}
impl Writer for UtpStream {
// Delegate writes to the underlying uTP socket's send_to.
fn write(&mut self, buf: &[u8]) -> IoResult<()> {
self.socket.send_to(buf)
}
}
#[cfg(test)]
mod test {
use super::{UtpSocket, UtpStream};
use super::{BUF_SIZE};
use super::{SocketConnected, SocketNew, SocketClosed, SocketEndOfFile};
use std::rand::random;
use std::io::test::next_test_ip4;
use util::now_microseconds;
use packet::{UtpPacket, StatePacket, FinPacket, DataPacket, ResetPacket, SynPacket};
#[test]
// Full SYN handshake over localhost; verifies connection-id mirroring and
// state transitions on both ends.
fn test_socket_ipv4() {
let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());
let client = iotry!(UtpSocket::bind(client_addr));
let mut server = iotry!(UtpSocket::bind(server_addr));
assert!(server.state == SocketNew);
assert!(client.state == SocketNew);
// Check proper difference in client's send connection id and receive connection id
assert_eq!(client.sender_connection_id, client.receiver_connection_id + 1);
spawn(proc() {
let client = iotry!(client.connect(server_addr));
assert!(client.state == SocketConnected);
assert_eq!(client.connected_to, server_addr);
drop(client);
});
let mut buf = [0u8, ..BUF_SIZE];
match server.recv_from(buf) {
e => println!("{}", e),
}
// After establishing a new connection, the server's ids are a mirror of the client's.
assert_eq!(server.receiver_connection_id, server.sender_connection_id + 1);
assert_eq!(server.connected_to, client_addr);
assert!(server.state == SocketConnected);
drop(server);
}
#[test]
// After the peer closes, reads must report EndOfFile once, then Closed.
fn test_recvfrom_on_closed_socket() {
use std::io::{Closed, EndOfFile};
let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());
let client = iotry!(UtpSocket::bind(client_addr));
let mut server = iotry!(UtpSocket::bind(server_addr));
assert!(server.state == SocketNew);
assert!(client.state == SocketNew);
spawn(proc() {
let mut client = iotry!(client.connect(server_addr));
assert!(client.state == SocketConnected);
assert_eq!(client.close(), Ok(()));
drop(client);
});
// Make the server listen for incoming connections
let mut buf = [0u8, ..BUF_SIZE];
let _resp = server.recv_from(buf);
assert!(server.state == SocketConnected);
// Closing the connection is fine
match server.recv_from(buf) {
Err(e) => panic!("{}", e),
_ => {},
}
assert_eq!(server.state, SocketEndOfFile);
// Trying to listen on the socket after closing it raises an
// EOF error
match server.recv_from(buf) {
Err(e) => assert_eq!(e.kind, EndOfFile),
v => panic!("expected {}, got {}", EndOfFile, v),
}
assert_eq!(server.state, SocketClosed);
// Trying again raises a Closed error
match server.recv_from(buf) {
Err(e) => assert_eq!(e.kind, Closed),
v => panic!("expected {}, got {}", Closed, v),
}
drop(server);
}
#[test]
// Writing to a socket after close() must fail with a Closed error.
fn test_sendto_on_closed_socket() {
use std::io::Closed;
let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());
let client = iotry!(UtpSocket::bind(client_addr));
let mut server = iotry!(UtpSocket::bind(server_addr));
assert!(server.state == SocketNew);
assert!(client.state == SocketNew);
spawn(proc() {
let client = iotry!(client.connect(server_addr));
assert!(client.state == SocketConnected);
let mut buf = [0u8, ..BUF_SIZE];
let mut client = client;
iotry!(client.recv_from(buf));
});
// Make the server listen for incoming connections
let mut buf = [0u8, ..BUF_SIZE];
let (_read, _src) = iotry!(server.recv_from(buf));
assert!(server.state == SocketConnected);
iotry!(server.close());
assert_eq!(server.state, SocketClosed);
// Trying to send to the socket after closing it raises an
// error
match server.send_to(buf) {
Err(e) => assert_eq!(e.kind, Closed),
v => panic!("expected {}, got {}", Closed, v),
}
drop(server);
}
#[test]
// STATE replies to SYN and FIN must not advance the peer's ack_nr.
fn test_acks_on_socket() {
let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());
let (tx, rx) = channel();
let client = iotry!(UtpSocket::bind(client_addr));
let server = iotry!(UtpSocket::bind(server_addr));
spawn(proc() {
// Make the server listen for incoming connections
let mut server = server;
let mut buf = [0u8, ..BUF_SIZE];
let _resp = server.recv_from(buf);
tx.send(server.seq_nr);
// Close the connection
iotry!(server.recv_from(buf));
drop(server);
});
let mut client = iotry!(client.connect(server_addr));
assert!(client.state == SocketConnected);
let sender_seq_nr = rx.recv();
let ack_nr = client.ack_nr;
assert!(ack_nr != 0);
assert!(ack_nr == sender_seq_nr);
assert_eq!(client.close(), Ok(()));
// The reply to both connect (SYN) and close (FIN) should be
// STATE packets, which don't increase the sequence number
// and, hence, the receiver's acknowledgement number.
assert!(client.ack_nr == ack_nr);
drop(client);
}
#[test]
// Drives handle_packet directly through a whole connection lifecycle:
// setup (SYN), usage (DATA) and teardown (FIN), checking each reply.
fn test_handle_packet() {
//fn test_connection_setup() {
let initial_connection_id: u16 = random();
let sender_connection_id = initial_connection_id + 1;
let server_addr = next_test_ip4();
let mut socket = iotry!(UtpSocket::bind(server_addr));
let mut packet = UtpPacket::new();
packet.set_wnd_size(BUF_SIZE as u32);
packet.set_type(SynPacket);
packet.set_connection_id(initial_connection_id);
// Do we have a response?
let response = socket.handle_packet(packet.clone());
assert!(response.is_some());
// Is is of the correct type?
let response = response.unwrap();
assert!(response.get_type() == StatePacket);
// Same connection id on both ends during connection establishment
assert!(response.connection_id() == packet.connection_id());
// Response acknowledges SYN
assert!(response.ack_nr() == packet.seq_nr());
// No payload?
assert!(response.payload.is_empty());
//}
// ---------------------------------
// fn test_connection_usage() {
let old_packet = packet;
let old_response = response;
let mut packet = UtpPacket::new();
packet.set_type(DataPacket);
packet.set_connection_id(sender_connection_id);
packet.set_seq_nr(old_packet.seq_nr() + 1);
packet.set_ack_nr(old_response.seq_nr());
let response = socket.handle_packet(packet.clone());
assert!(response.is_some());
let response = response.unwrap();
assert!(response.get_type() == StatePacket);
// Sender (i.e., who initated connection and sent SYN) has connection id
// equal to initial connection id + 1
// Receiver (i.e., who accepted connection) has connection id equal to
// initial connection id
assert!(response.connection_id() == initial_connection_id);
assert!(response.connection_id() == packet.connection_id() - 1);
// Previous packets should be ack'ed
assert!(response.ack_nr() == packet.seq_nr());
// Responses with no payload should not increase the sequence number
assert!(response.payload.is_empty());
assert!(response.seq_nr() == old_response.seq_nr());
// }
//fn test_connection_teardown() {
let old_packet = packet;
let old_response = response;
let mut packet = UtpPacket::new();
packet.set_type(FinPacket);
packet.set_connection_id(sender_connection_id);
packet.set_seq_nr(old_packet.seq_nr() + 1);
packet.set_ack_nr(old_response.seq_nr());
let response = socket.handle_packet(packet.clone());
assert!(response.is_some());
let response = response.unwrap();
assert!(response.get_type() == StatePacket);
// FIN packets have no payload but the sequence number shouldn't increase
assert!(packet.seq_nr() == old_packet.seq_nr() + 1);
// Nor should the ACK packet's sequence number
assert!(response.seq_nr() == old_response.seq_nr());
// FIN should be acknowledged
assert!(response.ack_nr() == packet.seq_nr());
//}
}
#[test]
// Keepalive STATE packets (repeated ACKs) must not elicit a reply.
fn test_response_to_keepalive_ack() {
// Boilerplate test setup
let initial_connection_id: u16 = random();
let server_addr = next_test_ip4();
let mut socket = iotry!(UtpSocket::bind(server_addr));
// Establish connection
let mut packet = UtpPacket::new();
packet.set_wnd_size(BUF_SIZE as u32);
packet.set_type(SynPacket);
packet.set_connection_id(initial_connection_id);
let response = socket.handle_packet(packet.clone());
assert!(response.is_some());
let response = response.unwrap();
assert!(response.get_type() == StatePacket);
let old_packet = packet;
let old_response = response;
// Now, send a keepalive packet
let mut packet = UtpPacket::new();
packet.set_wnd_size(BUF_SIZE as u32);
packet.set_type(StatePacket);
packet.set_connection_id(initial_connection_id);
packet.set_seq_nr(old_packet.seq_nr() + 1);
packet.set_ack_nr(old_response.seq_nr());
let response = socket.handle_packet(packet.clone());
assert!(response.is_none());
// Send a second keepalive packet, identical to the previous one
let response = socket.handle_packet(packet.clone());
assert!(response.is_none());
}
#[test]
// A non-SYN packet with an unknown connection id must be answered with RESET.
fn test_response_to_wrong_connection_id() {
// Boilerplate test setup
let initial_connection_id: u16 = random();
let server_addr = next_test_ip4();
let mut socket = iotry!(UtpSocket::bind(server_addr));
// Establish connection
let mut packet = UtpPacket::new();
packet.set_wnd_size(BUF_SIZE as u32);
packet.set_type(SynPacket);
packet.set_connection_id(initial_connection_id);
let response = socket.handle_packet(packet.clone());
assert!(response.is_some());
assert!(response.unwrap().get_type() == StatePacket);
// Now, disrupt connection with a packet with an incorrect connection id
let new_connection_id = initial_connection_id * 2;
let mut packet = UtpPacket::new();
packet.set_wnd_size(BUF_SIZE as u32);
packet.set_type(StatePacket);
packet.set_connection_id(new_connection_id);
let response = socket.handle_packet(packet.clone());
assert!(response.is_some());
let response = response.unwrap();
assert!(response.get_type() == ResetPacket);
assert!(response.ack_nr() == packet.seq_nr());
}
#[test]
// Stream-level smoke test: connect, close, and read EOF with no data.
fn test_utp_stream() {
let server_addr = next_test_ip4();
let mut server = iotry!(UtpStream::bind(server_addr));
spawn(proc() {
let mut client = iotry!(UtpStream::connect(server_addr));
iotry!(client.close());
});
iotry!(server.read_to_end());
}
/// End-to-end transfer of a payload that fits in a single packet.
#[test]
fn test_utp_stream_small_data() {
    // Fits in a packet
    const LEN: uint = 1024;
    let data = Vec::from_fn(LEN, |idx| idx as u8);
    assert_eq!(LEN, data.len());
    let d = data.clone();

    let server_addr = next_test_ip4();
    let mut server = UtpStream::bind(server_addr);

    // Writer task: connect, send the whole payload, close gracefully.
    spawn(proc() {
        let mut client = iotry!(UtpStream::connect(server_addr));
        iotry!(client.write(d.as_slice()));
        iotry!(client.close());
    });

    // The server must receive exactly the bytes written by the client.
    let read = iotry!(server.read_to_end());
    assert!(!read.is_empty());
    assert_eq!(read.len(), data.len());
    assert_eq!(read, data);
}
/// End-to-end transfer of a payload large enough to be split over many
/// packets (1 MiB vs. an MSS of 1400 bytes).
#[test]
fn test_utp_stream_large_data() {
    // Has to be sent over several packets
    const LEN: uint = 1024 * 1024;
    let data = Vec::from_fn(LEN, |idx| idx as u8);
    assert_eq!(LEN, data.len());
    let d = data.clone();

    let server_addr = next_test_ip4();
    let mut server = UtpStream::bind(server_addr);

    // Writer task: connect, send the whole payload, close gracefully.
    spawn(proc() {
        let mut client = iotry!(UtpStream::connect(server_addr));
        iotry!(client.write(d.as_slice()));
        iotry!(client.close());
    });

    // Reassembly across packets must reproduce the original byte stream.
    let read = iotry!(server.read_to_end());
    assert!(!read.is_empty());
    assert_eq!(read.len(), data.len());
    assert_eq!(read, data);
}
/// Reading from a stream after it has been fully drained and closed must
/// fail with `Closed` instead of blocking or returning data.
#[test]
fn test_utp_stream_successive_reads() {
    use std::io::Closed;

    const LEN: uint = 1024;
    let data: Vec<u8> = Vec::from_fn(LEN, |idx| idx as u8);
    assert_eq!(LEN, data.len());
    let d = data.clone();

    let server_addr = next_test_ip4();
    let mut server = UtpStream::bind(server_addr);

    spawn(proc() {
        let mut client = iotry!(UtpStream::connect(server_addr));
        iotry!(client.write(d.as_slice()));
        iotry!(client.close());
    });

    // First read drains everything up to the peer's FIN.
    iotry!(server.read_to_end());

    // A subsequent read on the closed stream must error out with `Closed`.
    let mut buf = [0u8, ..4096];
    match server.read(buf) {
        Err(ref e) if e.kind == Closed => {},
        _ => panic!("should have failed with Closed"),
    };
}
/// Delivering two consecutive data packets in reverse order: the first
/// (out-of-order) packet must not be acknowledged as received; the second
/// (filling the gap) must produce a response.
#[test]
fn test_unordered_packets() {
    // Boilerplate test setup
    let initial_connection_id: u16 = random();
    let server_addr = next_test_ip4();
    let mut socket = iotry!(UtpSocket::bind(server_addr));

    // Establish connection
    let mut packet = UtpPacket::new();
    packet.set_wnd_size(BUF_SIZE as u32);
    packet.set_type(SynPacket);
    packet.set_connection_id(initial_connection_id);

    let response = socket.handle_packet(packet.clone());
    assert!(response.is_some());
    let response = response.unwrap();
    assert!(response.get_type() == StatePacket);

    // Keep the handshake packets around to derive follow-up seq/ack numbers.
    let old_packet = packet;
    let old_response = response;

    let mut window: Vec<UtpPacket> = Vec::new();

    // Build two consecutive data packets (seq_nr + 1 and seq_nr + 2).
    let mut packet = UtpPacket::new();
    packet.set_wnd_size(BUF_SIZE as u32);
    packet.set_type(DataPacket);
    packet.set_connection_id(initial_connection_id);
    packet.set_seq_nr(old_packet.seq_nr() + 1);
    packet.set_ack_nr(old_response.seq_nr());
    packet.payload = vec!(1,2,3);
    window.push(packet);

    let mut packet = UtpPacket::new();
    packet.set_wnd_size(BUF_SIZE as u32);
    packet.set_type(DataPacket);
    packet.set_connection_id(initial_connection_id);
    packet.set_seq_nr(old_packet.seq_nr() + 2);
    packet.set_ack_nr(old_response.seq_nr());
    packet.payload = vec!(4,5,6);
    window.push(packet);

    // Send packets in reverse order
    let response = socket.handle_packet(window[1].clone());
    assert!(response.is_some());
    let response = response.unwrap();
    // The out-of-order packet must not be acked yet (there is a gap).
    assert!(response.ack_nr() != window[1].seq_nr());

    // Filling the gap elicits a response.
    let response = socket.handle_packet(window[0].clone());
    assert!(response.is_some());
}
/// Full-socket variant of the out-of-order test: a client sends four data
/// packets plus a FIN over UDP in scrambled order; the server must still
/// reassemble the original byte stream.
#[test]
fn test_socket_unordered_packets() {
    let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());

    let client = iotry!(UtpSocket::bind(client_addr));
    let mut server = iotry!(UtpSocket::bind(server_addr));

    assert!(server.state == SocketNew);
    assert!(client.state == SocketNew);

    // Check proper difference in client's send connection id and receive connection id
    assert_eq!(client.sender_connection_id, client.receiver_connection_id + 1);

    spawn(proc() {
        let mut client = iotry!(client.connect(server_addr));
        assert!(client.state == SocketConnected);
        // Use the raw UDP socket so the test controls packet order itself.
        let mut s = client.socket;
        let mut window: Vec<UtpPacket> = Vec::new();

        // Four 3-byte data packets covering the bytes 1..12.
        for data in Vec::from_fn(12, |idx| idx as u8 + 1).as_slice().chunks(3) {
            let mut packet = UtpPacket::new();
            packet.set_wnd_size(BUF_SIZE as u32);
            packet.set_type(DataPacket);
            packet.set_connection_id(client.sender_connection_id);
            packet.set_seq_nr(client.seq_nr);
            packet.set_ack_nr(client.ack_nr);
            packet.payload = data.to_vec();
            window.push(packet.clone());
            client.send_window.push(packet.clone());
            client.seq_nr += 1;
        }

        // Followed by a FIN to terminate the connection.
        let mut packet = UtpPacket::new();
        packet.set_wnd_size(BUF_SIZE as u32);
        packet.set_type(FinPacket);
        packet.set_connection_id(client.sender_connection_id);
        packet.set_seq_nr(client.seq_nr);
        packet.set_ack_nr(client.ack_nr);
        window.push(packet);
        client.seq_nr += 1;

        // Deliberately scrambled delivery order: 3, 2, 1, 0, then the FIN.
        iotry!(s.send_to(window[3].bytes().as_slice(), server_addr));
        iotry!(s.send_to(window[2].bytes().as_slice(), server_addr));
        iotry!(s.send_to(window[1].bytes().as_slice(), server_addr));
        iotry!(s.send_to(window[0].bytes().as_slice(), server_addr));
        iotry!(s.send_to(window[4].bytes().as_slice(), server_addr));

        // Drain a couple of server replies so the peer isn't left blocking.
        for _ in range(0u, 2) {
            let mut buf = [0, ..BUF_SIZE];
            iotry!(s.recv_from(buf));
        }
    });

    // Accept the incoming connection (SYN handling).
    let mut buf = [0u8, ..BUF_SIZE];
    match server.recv_from(buf) {
        e => println!("{}", e),
    }

    // After establishing a new connection, the server's ids are a mirror of the client's.
    assert_eq!(server.receiver_connection_id, server.sender_connection_id + 1);

    assert!(server.state == SocketConnected);

    // Despite the scrambled arrival order, the stream must come out intact.
    let mut stream = UtpStream { socket: server };
    let expected: Vec<u8> = Vec::from_fn(12, |idx| idx as u8 + 1);

    match stream.read_to_end() {
        Ok(data) => {
            assert_eq!(data.len(), expected.len());
            assert_eq!(data, expected);
        },
        Err(e) => panic!("{}", e),
    }
}
/// A SYN packet must update the socket's ack number but never be stored in
/// the incoming (data) buffer.
#[test]
fn test_socket_should_not_buffer_syn_packets() {
    use std::io::net::udp::UdpSocket;

    let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());
    let server = iotry!(UtpSocket::bind(server_addr));
    let client = iotry!(UdpSocket::bind(client_addr));

    // A pre-encoded SYN packet (raw uTP header bytes).
    let test_syn_raw = [0x41, 0x00, 0x41, 0xa7, 0x00, 0x00, 0x00,
    0x27, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x3a,
    0xf1, 0x00, 0x00];
    let test_syn_pkt = UtpPacket::decode(test_syn_raw);
    let seq_nr = test_syn_pkt.seq_nr();

    spawn(proc() {
        let mut client = client;
        iotry!(client.send_to(test_syn_raw, server_addr));
        client.set_timeout(Some(10));
        let mut buf = [0, ..BUF_SIZE];
        let packet = match client.recv_from(buf) {
            Ok((nread, _src)) => UtpPacket::decode(buf.slice_to(nread)),
            Err(e) => panic!("{}", e),
        };
        // The server's reply must acknowledge the SYN's sequence number.
        assert_eq!(packet.ack_nr(), seq_nr);
        drop(client);
    });

    let mut server = server;
    let mut buf = [0, ..20];
    iotry!(server.recv_from(buf));
    // The SYN advanced the ack number...
    assert!(server.ack_nr != 0);
    assert_eq!(server.ack_nr, seq_nr);
    // ...but must not have been stashed as incoming data.
    assert!(server.incoming_buffer.is_empty());
}
/// Sending three identical ACKs for the packet preceding a data packet must
/// trigger a fast resend of that data packet by the peer.
#[test]
fn test_response_to_triple_ack() {
    let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());
    let mut server = iotry!(UtpSocket::bind(server_addr));
    let client = iotry!(UtpSocket::bind(client_addr));

    // Fits in a packet
    const LEN: uint = 1024;
    let data = Vec::from_fn(LEN, |idx| idx as u8);
    let d = data.clone();
    assert_eq!(LEN, data.len());

    spawn(proc() {
        let mut client = iotry!(client.connect(server_addr));
        iotry!(client.send_to(d.as_slice()));
        iotry!(client.close());
    });

    let mut buf = [0, ..BUF_SIZE];
    // Expect SYN
    iotry!(server.recv_from(buf));

    // Receive data (read the raw UDP socket so no ACK is generated).
    let mut data_packet;
    match server.socket.recv_from(buf) {
        Ok((read, _src)) => {
            data_packet = UtpPacket::decode(buf.slice_to(read));
            assert!(data_packet.get_type() == DataPacket);
            assert_eq!(data_packet.payload, data);
            assert_eq!(data_packet.payload.len(), data.len());
        },
        Err(e) => panic!("{}", e),
    }
    let data_packet = data_packet;

    // Send triple ACK — three identical STATE packets acking the packet
    // *before* the data packet, signalling a fast resend request.
    let mut packet = UtpPacket::new();
    packet.set_wnd_size(BUF_SIZE as u32);
    packet.set_type(StatePacket);
    packet.set_seq_nr(server.seq_nr);
    packet.set_ack_nr(data_packet.seq_nr() - 1);
    packet.set_connection_id(server.sender_connection_id);

    for _ in range(0u, 3) {
        iotry!(server.socket.send_to(packet.bytes().as_slice(), client_addr));
    }

    // Receive data again and check that it's the same we reported as missing
    match server.socket.recv_from(buf) {
        Ok((0, _)) => panic!("Received 0 bytes from socket"),
        Ok((read, _src)) => {
            let packet = UtpPacket::decode(buf.slice_to(read));
            assert_eq!(packet.get_type(), DataPacket);
            assert_eq!(packet.seq_nr(), data_packet.seq_nr());
            assert!(packet.payload == data_packet.payload);
            // Finally ack the retransmission so the peer can finish.
            let response = server.handle_packet(packet).unwrap();
            iotry!(server.socket.send_to(response.bytes().as_slice(), server.connected_to));
        },
        Err(e) => panic!("{}", e),
    }

    // Receive close
    iotry!(server.recv_from(buf));
}
/// A discarded (simulated-lost) packet must eventually be retransmitted by
/// the peer after the congestion timeout fires.
#[test]
fn test_socket_timeout_request() {
    let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());

    let client = iotry!(UtpSocket::bind(client_addr));
    let mut server = iotry!(UtpSocket::bind(server_addr));

    let len = 512;
    let data = Vec::from_fn(len, |idx| idx as u8);
    let d = data.clone();

    assert!(server.state == SocketNew);
    assert!(client.state == SocketNew);

    // Check proper difference in client's send connection id and receive connection id
    assert_eq!(client.sender_connection_id, client.receiver_connection_id + 1);

    spawn(proc() {
        let mut client = iotry!(client.connect(server_addr));
        assert!(client.state == SocketConnected);
        assert_eq!(client.connected_to, server_addr);
        iotry!(client.send_to(d.as_slice()));
        drop(client);
    });

    // Accept the incoming connection (SYN handling).
    let mut buf = [0u8, ..BUF_SIZE];
    match server.recv_from(buf) {
        e => println!("{}", e),
    }

    // After establishing a new connection, the server's ids are a mirror of the client's.
    assert_eq!(server.receiver_connection_id, server.sender_connection_id + 1);
    assert_eq!(server.connected_to, client_addr);

    assert!(server.state == SocketConnected);

    // Purposefully read from UDP socket directly and discard it, in order
    // to behave as if the packet was lost and thus trigger the timeout
    // handling in the *next* call to `UtpSocket.recv_from`.
    iotry!(server.socket.recv_from(buf));

    // Set a much smaller than usual timeout, for quicker test completion
    server.congestion_timeout = 50;

    // Now wait for the previously discarded packet — recv_from returns
    // (0, _) on each timeout, so loop until real data arrives.
    loop {
        match server.recv_from(buf) {
            Ok((0, _)) => continue,
            Ok(_) => break,
            Err(e) => panic!("{}", e),
        }
    }

    drop(server);
}
/// `insert_into_buffer` must keep the incoming buffer sorted by sequence
/// number and replace an existing entry with the same seq_nr by the newer
/// packet.
#[test]
fn test_sorted_buffer_insertion() {
    let server_addr = next_test_ip4();
    let mut socket = iotry!(UtpSocket::bind(server_addr));

    let mut packet = UtpPacket::new();
    packet.set_seq_nr(1);

    assert!(socket.incoming_buffer.is_empty());

    socket.insert_into_buffer(packet.clone());
    assert_eq!(socket.incoming_buffer.len(), 1);

    packet.set_seq_nr(2);
    packet.set_timestamp_microseconds(128);

    socket.insert_into_buffer(packet.clone());
    assert_eq!(socket.incoming_buffer.len(), 2);
    assert_eq!(socket.incoming_buffer[1].seq_nr(), 2);
    assert_eq!(socket.incoming_buffer[1].timestamp_microseconds(), 128);

    packet.set_seq_nr(3);
    packet.set_timestamp_microseconds(256);

    socket.insert_into_buffer(packet.clone());
    assert_eq!(socket.incoming_buffer.len(), 3);
    assert_eq!(socket.incoming_buffer[2].seq_nr(), 3);
    assert_eq!(socket.incoming_buffer[2].timestamp_microseconds(), 256);

    // Replace a packet with a more recent version
    packet.set_seq_nr(2);
    packet.set_timestamp_microseconds(456);

    socket.insert_into_buffer(packet.clone());
    // Length unchanged: the duplicate seq_nr was replaced, not appended.
    assert_eq!(socket.incoming_buffer.len(), 3);
    assert_eq!(socket.incoming_buffer[1].seq_nr(), 2);
    assert_eq!(socket.incoming_buffer[1].timestamp_microseconds(), 456);
}
/// A data packet delivered twice (with different timestamps) must be
/// surfaced to the application exactly once.
#[test]
fn test_duplicate_packet_handling() {
    let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());

    let client = iotry!(UtpSocket::bind(client_addr));
    let mut server = iotry!(UtpSocket::bind(server_addr));

    assert!(server.state == SocketNew);
    assert!(client.state == SocketNew);

    // Check proper difference in client's send connection id and receive connection id
    assert_eq!(client.sender_connection_id, client.receiver_connection_id + 1);

    spawn(proc() {
        let mut client = iotry!(client.connect(server_addr));
        assert!(client.state == SocketConnected);
        let mut s = client.socket.clone();

        let mut packet = UtpPacket::new();
        packet.set_wnd_size(BUF_SIZE as u32);
        packet.set_type(DataPacket);
        packet.set_connection_id(client.sender_connection_id);
        packet.set_seq_nr(client.seq_nr);
        packet.set_ack_nr(client.ack_nr);
        packet.payload = vec!(1,2,3);

        // Send two copies of the packet, with different timestamps
        for _ in range(0u, 2) {
            packet.set_timestamp_microseconds(now_microseconds());
            iotry!(s.send_to(packet.bytes().as_slice(), server_addr));
        }
        client.seq_nr += 1;

        // Receive one ACK
        for _ in range(0u, 1) {
            let mut buf = [0, ..BUF_SIZE];
            iotry!(s.recv_from(buf));
        }

        iotry!(client.close());
    });

    // Accept the incoming connection (SYN handling).
    let mut buf = [0u8, ..BUF_SIZE];
    match server.recv_from(buf) {
        e => println!("{}", e),
    }

    // After establishing a new connection, the server's ids are a mirror of the client's.
    assert_eq!(server.receiver_connection_id, server.sender_connection_id + 1);

    assert!(server.state == SocketConnected);

    // The duplicate must not be delivered twice: exactly one copy of the
    // 3-byte payload comes out of the stream.
    let mut stream = UtpStream { socket: server };
    let expected: Vec<u8> = vec!(1,2,3);

    match stream.read_to_end() {
        Ok(data) => {
            println!("{}", data);
            assert_eq!(data.len(), expected.len());
            assert_eq!(data, expected);
        },
        Err(e) => panic!("{}", e),
    }
}
/// Dropping several data packets and replying with a selective-ACK bitmask
/// must make the sender retransmit the packets reported missing.
#[test]
fn test_selective_ack_response() {
    let server_addr = next_test_ip4();
    let len = 1024 * 10;
    let data = Vec::from_fn(len, |idx| idx as u8);
    let to_send = data.clone();

    // Client
    spawn(proc() {
        let mut client = iotry!(UtpStream::connect(server_addr));
        // Shorter timeout so the retransmissions happen quickly.
        client.socket.congestion_timeout = 50;

        // Stream.write
        iotry!(client.write(to_send.as_slice()));
        iotry!(client.close());
    });

    // Server
    let mut server = iotry!(UtpSocket::bind(server_addr));
    let mut buf = [0, ..BUF_SIZE];

    // Connect
    iotry!(server.recv_from(buf));

    // Discard packets — read the raw UDP socket so they are neither acked
    // nor buffered, simulating loss.
    iotry!(server.socket.recv_from(buf));
    iotry!(server.socket.recv_from(buf));
    iotry!(server.socket.recv_from(buf));

    // Generate SACK: bitmask 12 (0b1100) marks ack_nr + 4 and ack_nr + 5
    // as received, everything before them as missing.
    let mut packet = UtpPacket::new();
    packet.set_seq_nr(server.seq_nr);
    packet.set_ack_nr(server.ack_nr - 1);
    packet.set_connection_id(server.sender_connection_id);
    packet.set_timestamp_microseconds(now_microseconds());
    packet.set_type(StatePacket);
    packet.set_sack(Some(vec!(12, 0, 0, 0)));

    // Send SACK
    iotry!(server.socket.send_to(packet.bytes().as_slice(), server.connected_to.clone()));

    // Expect to receive "missing" packets — the full stream must still
    // reassemble despite the dropped packets.
    let mut stream = UtpStream { socket: server };
    let read = iotry!(stream.read_to_end());
    assert!(!read.is_empty());
    assert_eq!(read.len(), data.len());
    assert_eq!(read, data);
}
/// The sender initially transmits only the even-indexed chunks; the close
/// sequence (which waits for acks) must retransmit the rest so the receiver
/// still obtains the complete stream.
#[test]
fn test_correct_packet_loss() {
    let (client_addr, server_addr) = (next_test_ip4(), next_test_ip4());

    let mut server = iotry!(UtpStream::bind(server_addr));
    let client = iotry!(UtpSocket::bind(client_addr));
    let len = 1024 * 10;
    let data = Vec::from_fn(len, |idx| idx as u8);
    let to_send = data.clone();

    spawn(proc() {
        let mut client = iotry!(client.connect(server_addr));

        // Send everything except the odd chunks
        let chunks = to_send.as_slice().chunks(BUF_SIZE);
        let dst = client.connected_to;
        for (index, chunk) in chunks.enumerate() {
            let mut packet = UtpPacket::new();
            packet.set_seq_nr(client.seq_nr);
            packet.set_ack_nr(client.ack_nr);
            packet.set_connection_id(client.sender_connection_id);
            packet.set_timestamp_microseconds(now_microseconds());
            packet.payload = chunk.to_vec();
            packet.set_type(DataPacket);
            // Only even chunks actually hit the wire; every packet still
            // goes into the send window so `close` can retransmit the rest.
            if index % 2 == 0 {
                iotry!(client.socket.send_to(packet.bytes().as_slice(), dst));
            }
            client.send_window.push(packet);
            client.seq_nr += 1;
        }

        iotry!(client.close());
    });

    let read = iotry!(server.read_to_end());
    assert_eq!(read.len(), data.len());
    assert_eq!(read, data);
}
/// Receiving with an output buffer smaller than a packet's payload must
/// deliver the data across several reads without losing bytes.
#[test]
fn test_tolerance_to_small_buffers() {
    use std::io::EndOfFile;

    let server_addr = next_test_ip4();
    let mut server = iotry!(UtpSocket::bind(server_addr));
    let len = 1024;
    let data = Vec::from_fn(len, |idx| idx as u8);
    let to_send = data.clone();

    spawn(proc() {
        let mut client = iotry!(UtpStream::connect(server_addr));
        iotry!(client.write(to_send.as_slice()));
        iotry!(client.close());
    });

    let mut read = Vec::new();
    while server.state != SocketClosed {
        // Deliberately smaller than the 1024-byte payload.
        let mut small_buffer = [0, ..512];
        match server.recv_from(small_buffer) {
            Ok((0, _src)) => (),
            Ok((len, _src)) => read.push_all(small_buffer.slice_to(len)),
            Err(ref e) if e.kind == EndOfFile => break,
            Err(e) => panic!("{}", e),
        }
    }

    assert_eq!(read.len(), data.len());
    assert_eq!(read, data);
}
/// Transfers must survive the 16-bit sequence number wrapping around from
/// u16::MAX back to 0 mid-stream.
#[test]
fn test_sequence_number_rollover() {
    let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());

    let mut server = UtpStream::bind(server_addr);

    let len = BUF_SIZE * 4;
    let data = Vec::from_fn(len, |idx| idx as u8);
    let to_send = data.clone();

    spawn(proc() {
        let mut socket = iotry!(UtpSocket::bind(client_addr));

        // Advance socket's sequence number close enough to u16::MAX that
        // sending the payload forces a wrap-around.
        socket.seq_nr = ::std::u16::MAX - (to_send.len() / (BUF_SIZE * 2)) as u16;
        let socket = iotry!(socket.connect(server_addr));
        let mut client = UtpStream { socket: socket };

        // Send enough data to rollover
        iotry!(client.write(to_send.as_slice()));
        // Check that the sequence number did rollover
        assert!(client.socket.seq_nr < 50);
        // Close connection
        iotry!(client.close());
    });

    let received = iotry!(server.read_to_end());
    assert_eq!(received.len(), data.len());
    assert_eq!(received, data);
}
}
Rename missing_doc to missing_docs.
//! Implementation of the Micro Transport Protocol.[^spec]
//!
//! [^spec]: http://www.bittorrent.org/beps/bep_0029.html
// __________ ____ ____
// /_ __/ __ \/ __ \/ __ \
// / / / / / / / / / / / /
// / / / /_/ / /_/ / /_/ /
// /_/ \____/_____/\____/
//
// - Lossy UDP socket for testing purposes: send and receive ops are wrappers
// that stochastically drop or reorder packets.
// - Sending FIN on drop
// - Handle packet loss
// - Path MTU discovery (RFC4821)
#![feature(macro_rules)]
#![feature(phase)]
#![feature(if_let)]
#![feature(while_let)]
#![feature(globs)]
#![deny(missing_docs)]
extern crate time;
#[phase(plugin, link)] extern crate log;
use std::io::net::udp::UdpSocket;
use std::io::net::ip::SocketAddr;
use std::io::IoResult;
use std::rand::random;
use std::collections::{DList, Deque};
use util::*;
use packet::*;
mod util;
mod bit_iterator;
mod packet;
// For simplicity's sake, let us assume no packet will ever exceed the
// Ethernet maximum transfer unit of 1500 bytes.
const BUF_SIZE: uint = 1500;
const DELAY_MAX_AGE: u32 = 2 * 60 * 1_000_000;
const GAIN: uint = 1;
const ALLOWED_INCREASE: uint = 1;
const TARGET: uint = 100_000; // 100 milliseconds
const MSS: uint = 1400;
const MIN_CWND: uint = 2;
const INIT_CWND: uint = 2;
// Unwrap an `IoResult`, panicking with the error's display message on
// failure. Used pervasively in tests and in internal send paths where an
// I/O error is considered unrecoverable.
macro_rules! iotry(
    ($e:expr) => (match $e { Ok(e) => e, Err(e) => panic!("{}", e) })
)
// Connection state of a `UtpSocket`, covering the uTP handshake
// (SYN -> STATE), the established phase, and teardown via FIN or Reset.
#[deriving(PartialEq,Eq,Show)]
enum UtpSocketState {
    // Freshly bound; no handshake traffic yet.
    SocketNew,
    // Handshake completed; data may flow in both directions.
    SocketConnected,
    // SYN sent, waiting for the peer's STATE reply.
    SocketSynSent,
    // Peer's FIN seen; remaining in-flight packets may still be consumed.
    SocketFinReceived,
    // Our FIN sent, waiting for the connection to wind down.
    SocketFinSent,
    // Peer aborted the connection with a Reset packet.
    SocketResetReceived,
    // Connection fully closed; further reads fail with `Closed`.
    SocketClosed,
    // All data consumed after a FIN; the next read reports EOF then closes.
    SocketEndOfFile,
}
/// A uTP (Micro Transport Protocol) socket.
pub struct UtpSocket {
    // Underlying UDP transport.
    socket: UdpSocket,
    // Address of the remote peer (meaningful once connected).
    connected_to: SocketAddr,
    // Connection id used on packets we send; always receiver id + 1.
    sender_connection_id: u16,
    // Connection id expected on packets we receive.
    receiver_connection_id: u16,
    // Sequence number of the next packet we will send.
    seq_nr: u16,
    // Sequence number of the last in-order packet we acknowledged.
    ack_nr: u16,
    state: UtpSocketState,
    // Received but not acknowledged packets
    incoming_buffer: Vec<UtpPacket>,
    // Sent but not yet acknowledged packets
    send_window: Vec<UtpPacket>,
    // Packets queued by `send_to` and flushed by `send`.
    unsent_queue: DList<UtpPacket>,
    // Consecutive duplicate-ACK counter (triple-ACK triggers fast resend).
    duplicate_ack_count: uint,
    // Sequence number of the last packet the peer acknowledged.
    last_acked: u16,
    last_acked_timestamp: u32,
    // Sequence number carried by a received FIN packet.
    fin_seq_nr: u16,
    // Smoothed round-trip time and its variance (microsecond-scale),
    // fed into the congestion timeout calculation.
    rtt: int,
    rtt_variance: int,
    // Tail of a packet payload that did not fit the caller's buffer.
    pending_data: Vec<u8>,
    // Bytes currently in flight (sent, unacknowledged).
    curr_window: uint,
    // Peer's advertised receive window.
    remote_wnd_size: uint,
    // (timestamp, delay) samples used by the LEDBAT-style controller.
    current_delays: Vec<(u32,u32)>,
    base_delays: Vec<(u32,u32)>,
    // Retransmission timeout in milliseconds.
    congestion_timeout: u32,
    // Congestion window in bytes.
    cwnd: uint,
}
impl UtpSocket {
/// Create a UTP socket from the given address.
///
/// Binds the underlying UDP socket and initialises all connection state.
/// The receive connection id is random; the send id is receive id + 1,
/// as the uTP handshake convention requires.
#[unstable]
pub fn bind(addr: SocketAddr) -> IoResult<UtpSocket> {
    let skt = UdpSocket::bind(addr);
    let connection_id = random::<u16>();
    match skt {
        Ok(x) => Ok(UtpSocket {
            socket: x,
            // Placeholder peer address until `connect` or an incoming SYN
            // establishes the real one.
            connected_to: addr,
            receiver_connection_id: connection_id,
            sender_connection_id: connection_id + 1,
            seq_nr: 1,
            ack_nr: 0,
            state: SocketNew,
            incoming_buffer: Vec::new(),
            send_window: Vec::new(),
            unsent_queue: DList::new(),
            duplicate_ack_count: 0,
            last_acked: 0,
            last_acked_timestamp: 0,
            fin_seq_nr: 0,
            rtt: 0,
            rtt_variance: 0,
            pending_data: Vec::new(),
            curr_window: 0,
            remote_wnd_size: 0,
            current_delays: Vec::new(),
            base_delays: Vec::new(),
            congestion_timeout: 1000, // 1 second
            cwnd: INIT_CWND * MSS,
        }),
        Err(e) => Err(e)
    }
}
/// Open a uTP connection to a remote host by hostname or IP address.
///
/// Sends a SYN carrying the receiver connection id and retries up to five
/// times with a 500 ms read timeout, waiting for the peer's STATE reply.
/// Fails with `ConnectionFailed` if the peer answers with anything other
/// than a STATE packet.
#[unstable]
pub fn connect(mut self, other: SocketAddr) -> IoResult<UtpSocket> {
    use std::io::{IoError, ConnectionFailed};

    self.connected_to = other;
    assert_eq!(self.receiver_connection_id + 1, self.sender_connection_id);

    let mut packet = UtpPacket::new();
    packet.set_type(SynPacket);
    packet.set_connection_id(self.receiver_connection_id);
    packet.set_seq_nr(self.seq_nr);

    let mut len = 0;
    let mut addr = self.connected_to;
    let mut buf = [0, ..BUF_SIZE];

    // Up to five SYN attempts, each with a fresh timestamp.
    for _ in range(0u, 5) {
        packet.set_timestamp_microseconds(now_microseconds());

        // Send packet
        try!(self.socket.send_to(packet.bytes().as_slice(), other));
        self.state = SocketSynSent;

        // Validate response
        self.socket.set_read_timeout(Some(500));
        match self.socket.recv_from(buf) {
            Ok((read, src)) => { len = read; addr = src; break; },
            Err(ref e) if e.kind == std::io::TimedOut => continue,
            Err(e) => return Err(e),
        };
    }
    // NOTE(review): these panic if all five attempts time out (len stays 0)
    // or if the reply came from a different address — confirm a proper
    // error return wouldn't be preferable here.
    assert!(len == HEADER_SIZE);
    assert!(addr == self.connected_to);

    let packet = UtpPacket::decode(buf.slice_to(len));
    if packet.get_type() != StatePacket {
        return Err(IoError {
            kind: ConnectionFailed,
            desc: "The remote peer sent an invalid reply",
            detail: None,
        });
    }

    // Record the peer's sequence number and move to the connected state.
    self.ack_nr = packet.seq_nr();
    self.state = SocketConnected;
    self.seq_nr += 1;

    debug!("connected to: {}", self.connected_to);

    return Ok(self);
}
/// Gracefully close connection to peer.
///
/// This method allows both peers to receive all packets still in
/// flight.
#[unstable]
pub fn close(&mut self) -> IoResult<()> {
    // Wait for acknowledgment on pending sent packets
    let mut buf = [0u8, ..BUF_SIZE];
    while !self.send_window.is_empty() {
        try!(self.recv_from(buf));
    }

    let mut packet = UtpPacket::new();
    packet.set_connection_id(self.sender_connection_id);
    packet.set_seq_nr(self.seq_nr);
    packet.set_ack_nr(self.ack_nr);
    packet.set_timestamp_microseconds(now_microseconds());
    packet.set_type(FinPacket);

    // Send FIN
    try!(self.socket.send_to(packet.bytes().as_slice(), self.connected_to));
    self.state = SocketFinSent;

    // Receive JAKE — keep consuming packets until the connection reaches
    // the closed state; an EndOfFile error also counts as closed.
    while self.state != SocketClosed {
        match self.recv_from(buf) {
            Ok(_) => {},
            Err(ref e) if e.kind == std::io::EndOfFile => self.state = SocketClosed,
            Err(e) => return Err(e),
        };
    }

    Ok(())
}
/// Receive data from socket.
///
/// On success, returns the number of bytes read and the sender's address.
/// Returns SocketEndOfFile after receiving a FIN packet when the remaining
/// inflight packets are consumed. Subsequent calls return SocketClosed.
#[unstable]
pub fn recv_from(&mut self, buf: &mut[u8]) -> IoResult<(uint,SocketAddr)> {
    use std::io::{IoError, EndOfFile, Closed};

    // EOF is reported exactly once; the state then transitions to Closed.
    if self.state == SocketEndOfFile {
        self.state = SocketClosed;
        return Err(IoError {
            kind: EndOfFile,
            desc: "End of file reached",
            detail: None,
        });
    }

    if self.state == SocketClosed {
        return Err(IoError {
            kind: Closed,
            desc: "Connection closed",
            detail: None,
        });
    }

    // Serve buffered in-order data first; only hit the network when the
    // incoming buffer yields nothing.
    match self.flush_incoming_buffer(buf) {
        0 => self.recv(buf),
        read => Ok((read, self.connected_to)),
    }
}
// Read one packet from the network, feed it through the state machine,
// reply if the state machine produced a response, and flush any in-order
// data into `buf`. A congestion-timeout doubles the timeout, collapses the
// congestion window to one MSS, requests a fast resend, and reports
// `Ok((0, _))` so callers can retry.
fn recv(&mut self, buf: &mut[u8]) -> IoResult<(uint,SocketAddr)> {
    use std::io::{IoError, TimedOut, ConnectionReset};

    let mut b = [0, ..BUF_SIZE + HEADER_SIZE];
    // A fresh socket waits indefinitely for its first packet (the SYN);
    // afterwards reads are bounded by the congestion timeout.
    if self.state != SocketNew {
        debug!("setting read timeout of {} ms", self.congestion_timeout);
        self.socket.set_read_timeout(Some(self.congestion_timeout as u64));
    }
    let (read, src) = match self.socket.recv_from(b) {
        Err(ref e) if e.kind == TimedOut => {
            debug!("recv_from timed out");
            // Exponential backoff on the timeout.
            // NOTE(review): the backoff is unbounded here — confirm the
            // 60 s clamp in update_congestion_timeout always catches up.
            self.congestion_timeout = self.congestion_timeout * 2;
            self.cwnd = MSS;
            self.send_fast_resend_request();
            return Ok((0, self.connected_to));
        },
        Ok(x) => x,
        Err(e) => return Err(e),
    };
    let packet = UtpPacket::decode(b.slice_to(read));
    debug!("received {}", packet);

    if packet.get_type() == ResetPacket {
        return Err(IoError {
            kind: ConnectionReset,
            desc: "Remote host aborted connection (incorrect connection id)",
            detail: None,
        });
    }

    // A SYN establishes who the peer is.
    if packet.get_type() == SynPacket {
        self.connected_to = src;
    }

    let shallow_clone = packet.shallow_clone();

    // Stash data packets at or beyond the next expected sequence number.
    // NOTE(review): this comparison doesn't account for seq_nr wrap-around
    // at u16::MAX — confirm rollover is handled elsewhere.
    if packet.get_type() == DataPacket && self.ack_nr + 1 <= packet.seq_nr() {
        self.insert_into_buffer(packet);
    }

    // Let the state machine produce a reply (ACK, Reset, ...), if any.
    if let Some(pkt) = self.handle_packet(shallow_clone) {
        let mut pkt = pkt;
        pkt.set_wnd_size(BUF_SIZE as u32);
        try!(self.socket.send_to(pkt.bytes().as_slice(), src));
        debug!("sent {}", pkt);
    }

    // Flush incoming buffer if possible
    let read = self.flush_incoming_buffer(buf);

    Ok((read, src))
}
// Backwards-compatible alias kept for older callers; delegates unchanged.
#[allow(missing_docs)]
#[deprecated = "renamed to `recv_from`"]
pub fn recvfrom(&mut self, buf: &mut[u8]) -> IoResult<(uint,SocketAddr)> {
    self.recv_from(buf)
}
fn prepare_reply(&self, original: &UtpPacket, t: UtpPacketType) -> UtpPacket {
let mut resp = UtpPacket::new();
resp.set_type(t);
let self_t_micro: u32 = now_microseconds();
let other_t_micro: u32 = original.timestamp_microseconds();
resp.set_timestamp_microseconds(self_t_micro);
resp.set_timestamp_difference_microseconds((self_t_micro - other_t_micro));
resp.set_connection_id(self.sender_connection_id);
resp.set_seq_nr(self.seq_nr);
resp.set_ack_nr(self.ack_nr);
resp
}
/// Remove packet in incoming buffer and update current acknowledgement
/// number.
///
/// Returns the removed packet, or `None` if the buffer was empty.
fn advance_incoming_buffer(&mut self) -> Option<UtpPacket> {
    match self.incoming_buffer.remove(0) {
        Some(packet) => {
            debug!("Removed packet from incoming buffer: {}", packet);
            // The removed packet is now fully consumed, so its sequence
            // number becomes the new acknowledgement number.
            self.ack_nr = packet.seq_nr();
            Some(packet)
        },
        None => None
    }
}
/// Discards sequential, ordered packets in incoming buffer, starting from
/// the most recently acknowledged to the most recent, as long as there are
/// no missing packets. The discarded packets' payload is written to the
/// slice `buf`, starting in position `start`.
/// Returns the last written index.
fn flush_incoming_buffer(&mut self, buf: &mut [u8]) -> uint {
    let mut idx = 0;

    // Check if there is any pending data from a partially flushed packet
    if !self.pending_data.is_empty() {
        let len = buf.clone_from_slice(self.pending_data.as_slice());

        // If all the data in the pending data buffer fits the given output
        // buffer, remove the corresponding packet from the incoming buffer
        // and clear the pending data buffer
        if len == self.pending_data.len() {
            self.pending_data.clear();
            self.advance_incoming_buffer();
            return idx + len;
        } else {
            // Remove the bytes copied to the output buffer from the pending
            // data buffer (i.e., pending -= output)
            self.pending_data = self.pending_data.slice_from(len).to_vec();
            // The output buffer is full: only part of the pending data fit
            // (clone_from_slice copies min(buf, pending) bytes). Stop here.
            // Previously control fell through to the loop below, which
            // would copy the top packet's payload from its *beginning* over
            // the bytes just written, corrupting the delivered stream.
            return len;
        }
    }

    // Copy the payload of as many packets in the incoming buffer as possible
    while !self.incoming_buffer.is_empty() &&
        (self.ack_nr == self.incoming_buffer[0].seq_nr() ||
         self.ack_nr + 1 == self.incoming_buffer[0].seq_nr())
    {
        let len = std::cmp::min(buf.len() - idx, self.incoming_buffer[0].payload.len());

        for i in range(0, len) {
            buf[idx] = self.incoming_buffer[0].payload[i];
            idx += 1;
        }

        // Remove top packet if its payload fits the output buffer
        if self.incoming_buffer[0].payload.len() == len {
            self.advance_incoming_buffer();
        } else {
            // Keep the unread tail of the payload for the next call.
            self.pending_data.push_all(self.incoming_buffer[0].payload.slice_from(len));
        }

        // Stop if the output buffer is full
        if buf.len() == idx {
            return idx;
        }
    }

    return idx;
}
/// Send data on socket to the remote peer. Returns nothing on success.
//
// # Implementation details
//
// This method inserts packets into the send buffer and keeps trying to
// advance the send window until an ACK corresponding to the last packet is
// received.
//
// Note that the buffer passed to `send_to` might exceed the maximum packet
// size, which will result in the data being split over several packets.
#[unstable]
pub fn send_to(&mut self, buf: &[u8]) -> IoResult<()> {
    use std::io::{IoError, Closed};

    if self.state == SocketClosed {
        return Err(IoError {
            kind: Closed,
            desc: "Connection closed",
            detail: None,
        });
    }

    // Split the payload into MSS-sized packets and queue them.
    for chunk in buf.chunks(MSS - HEADER_SIZE) {
        let mut packet = UtpPacket::new();
        packet.set_type(DataPacket);
        packet.payload = chunk.to_vec();
        packet.set_timestamp_microseconds(now_microseconds());
        packet.set_seq_nr(self.seq_nr);
        packet.set_ack_nr(self.ack_nr);
        packet.set_connection_id(self.sender_connection_id);

        self.unsent_queue.push(packet);
        self.seq_nr += 1;
    }

    // Flush unsent packet queue
    self.send();

    // Consume acknowledgements until latest packet
    // NOTE(review): `self.seq_nr - 1` and the `<` comparison don't account
    // for u16 wrap-around — confirm rollover is covered by the retransmit
    // path (test_sequence_number_rollover exercises this).
    let mut buf = [0, ..BUF_SIZE];
    while self.last_acked < self.seq_nr - 1 {
        try!(self.recv_from(buf));
    }

    Ok(())
}
/// Send every packet in the unsent packet queue.
///
/// Each packet is gated by the congestion controller: while the bytes in
/// flight plus this packet would exceed the allowed window (the smaller of
/// our cwnd and the peer's advertised window, but at least MIN_CWND * MSS),
/// block on `recv_from` to consume acknowledgements first.
fn send(&mut self) {
    let dst = self.connected_to;
    while let Some(packet) = self.unsent_queue.pop_front() {
        debug!("current window: {}", self.send_window.len());
        let max_inflight = std::cmp::min(self.cwnd, self.remote_wnd_size);
        let max_inflight = std::cmp::max(MIN_CWND * MSS, max_inflight);
        while self.curr_window + packet.len() > max_inflight {
            let mut buf = [0, ..BUF_SIZE];
            iotry!(self.recv_from(buf));
        }
        iotry!(self.socket.send_to(packet.bytes().as_slice(), dst));
        debug!("sent {}", packet);
        // Track the packet as in flight until it is acknowledged.
        self.curr_window += packet.len();
        self.send_window.push(packet);
    }
}
// Backwards-compatible alias kept for older callers; delegates unchanged.
#[allow(missing_docs)]
#[deprecated = "renamed to `send_to`"]
pub fn sendto(&mut self, buf: &[u8]) -> IoResult<()> {
    self.send_to(buf)
}
/// Send fast resend request.
///
/// Sends three identical ACK/STATE packets to the remote host, signalling a
/// fast resend request.
fn send_fast_resend_request(&mut self) {
    let mut packet = UtpPacket::new();
    packet.set_wnd_size(BUF_SIZE as u32);
    packet.set_type(StatePacket);
    packet.set_ack_nr(self.ack_nr);
    packet.set_seq_nr(self.seq_nr);
    packet.set_connection_id(self.sender_connection_id);

    // Three duplicate ACKs is the conventional trigger for a fast resend;
    // each copy carries a fresh timestamp and delay measurement.
    for _ in range(0u, 3) {
        let t = now_microseconds();
        packet.set_timestamp_microseconds(t);
        packet.set_timestamp_difference_microseconds((t - self.last_acked_timestamp));
        iotry!(self.socket.send_to(packet.bytes().as_slice(), self.connected_to));
        debug!("sent {}", packet);
    }
}
// Record a new base-delay sample `v`, evicting samples older than
// DELAY_MAX_AGE (2 minutes) from the front of the window.
fn update_base_delay(&mut self, v: u32) {
    // Remove measurements more than 2 minutes old
    let now = now_microseconds();
    while !self.base_delays.is_empty() && now - self.base_delays[0].val0() > DELAY_MAX_AGE {
        self.base_delays.remove(0);
    }

    // Insert new measurement
    self.base_delays.push((now_microseconds(), v));
}
// Record a new current-delay sample `v`, evicting samples older than
// DELAY_MAX_AGE (2 minutes) from the front of the window.
fn update_current_delay(&mut self, v: u32) {
    // Remove measurements more than 2 minutes old
    let now = now_microseconds();
    while !self.current_delays.is_empty() && now - self.current_delays[0].val0() > DELAY_MAX_AGE {
        self.current_delays.remove(0);
    }

    // Insert new measurement
    self.current_delays.push((now_microseconds(), v));
}
// Update the retransmission timeout from a new RTT sample, following the
// classic smoothed-RTT scheme (RTTVAR gain 1/4, SRTT gain 1/8,
// RTO = SRTT + 4 * RTTVAR), clamped to the range [500 ms, 60 s].
fn update_congestion_timeout(&mut self, current_delay: int) {
    let delta = self.rtt - current_delay;
    self.rtt_variance += (std::num::abs(delta) - self.rtt_variance) / 4;
    self.rtt += (current_delay - self.rtt) / 8;
    self.congestion_timeout = std::cmp::max(self.rtt + self.rtt_variance * 4, 500) as u32;
    self.congestion_timeout = std::cmp::min(self.congestion_timeout, 60_000);

    debug!("current_delay: {}", current_delay);
    debug!("delta: {}", delta);
    debug!("self.rtt_variance: {}", self.rtt_variance);
    debug!("self.rtt: {}", self.rtt);
    debug!("self.congestion_timeout: {}", self.congestion_timeout);
}
/// Calculate the filtered current delay in the current window.
///
/// The current delay is calculated through application of the exponential
/// weighted moving average filter with smoothing factor 0.333 over the
/// current delays in the current window.
///
/// Panics if no current-delay samples have been recorded yet (the final
/// index into `output` underflows on an empty window).
fn filtered_current_delay(&self) -> u32 {
    let input = self.current_delays.iter().map(|&(_,x)| x as f64).collect();
    let output = exponential_weighted_moving_average(input, 0.333);
    output[output.len() - 1] as u32
}
/// Calculate the lowest base delay in the current window.
///
/// Panics if no base delay samples have been recorded yet.
fn min_base_delay(&self) -> u32 {
    // The samples are (timestamp, delay) tuples. Calling `.min()` on the
    // tuples compared them lexicographically — timestamp first — so it
    // returned the *oldest* sample's delay rather than the smallest delay.
    // Project out the delay component before taking the minimum.
    self.base_delays.iter().map(|&(_, delay)| delay).min().unwrap()
}
/// Build the selective acknowledgment payload for usage in packets.
///
/// Bit `i` of the returned bitfield stands for sequence number
/// `ack_nr + 2 + i`; a set bit marks an out-of-order packet stashed in the
/// incoming buffer. The vector is padded to a multiple of four bytes, as
/// the uTP extension format requires.
fn build_selective_ack(&self) -> Vec<u8> {
    let mut stashed = self.incoming_buffer.iter()
        .filter(|&pkt| pkt.seq_nr() > self.ack_nr);

    let mut sack = Vec::new();
    for packet in stashed {
        // NOTE(review): a stashed packet with seq_nr == ack_nr + 1 would
        // make this subtraction wrap — confirm such packets can never be
        // present here (they should have been flushed as in-order data).
        let diff = packet.seq_nr() - self.ack_nr - 2;
        let byte = (diff / 8) as uint;
        let bit = (diff % 8) as uint;

        // Grow the bitfield up to and including the byte this bit lives in.
        // The previous `if` pushed at most one byte per packet, so a gap
        // spanning more than eight sequence numbers OR'ed the bit into the
        // wrong (last) byte and produced a corrupted SACK mask.
        while byte >= sack.len() {
            sack.push(0u8);
        }

        // The incoming buffer is kept sorted by seq_nr, so `byte` never
        // decreases and the target byte is always the vector's last one.
        let mut bitarray = sack.pop().unwrap();
        bitarray |= 1 << bit;
        sack.push(bitarray);
    }

    // Make sure the amount of elements in the SACK vector is a
    // multiple of 4
    if sack.len() % 4 != 0 {
        let len = sack.len();
        sack.grow((len / 4 + 1) * 4 - len, 0);
    }

    return sack;
}
/// Retransmit the packet with the given sequence number, if it is still
/// held in the send window; otherwise just log the miss.
fn resend_lost_packet(&mut self, lost_packet_nr: u16) {
if let Some(packet) = self.send_window.iter()
.find(|pkt| pkt.seq_nr() == lost_packet_nr)
{
iotry!(self.socket.send_to(packet.bytes().as_slice(), self.connected_to));
debug!("sent {}", packet);
} else {
debug!("Packet {} not found", lost_packet_nr);
}
}
/// Forget sent packets that were acknowledged by the remote peer.
///
/// Finds the most recently acknowledged packet in the send window and
/// drops it together with every packet before it, crediting each
/// packet's size back to the in-flight byte count (`curr_window`).
fn advance_send_window(&mut self) {
if let Some(position) = self.send_window.iter()
.position(|pkt| pkt.seq_nr() == self.last_acked)
{
// Remove from the front so the window stays ordered
for _ in range(0, position + 1) {
let packet = self.send_window.remove(0).unwrap();
self.curr_window -= packet.len();
}
}
debug!("self.curr_window: {}", self.curr_window);
}
/// Handle incoming packet, updating socket state accordingly.
///
/// Returns appropriate reply packet, if needed.
///
/// Dispatches on the packet type: SYN establishes a connection, DATA is
/// acknowledged (with a SACK extension when out of order), FIN starts
/// teardown, STATE drives the congestion controller and send window,
/// and RESET moves the socket to the reset state.
fn handle_packet(&mut self, packet: UtpPacket) -> Option<UtpPacket> {
// Reset connection if connection id doesn't match and this isn't a SYN
if packet.get_type() != SynPacket &&
!(packet.connection_id() == self.sender_connection_id ||
packet.connection_id() == self.receiver_connection_id) {
return Some(self.prepare_reply(&packet, ResetPacket));
}
// Acknowledge only if the packet strictly follows the previous one
if self.ack_nr + 1 == packet.seq_nr() {
self.ack_nr = packet.seq_nr();
}
// Track the peer's advertised receive window
self.remote_wnd_size = packet.wnd_size() as uint;
debug!("self.remote_wnd_size: {}", self.remote_wnd_size);
// Ignore packets with sequence number higher than the one in the FIN packet.
if self.state == SocketFinReceived && self.fin_seq_nr == self.ack_nr &&
packet.seq_nr() > self.fin_seq_nr
{
debug!("Ignoring packet with sequence number {} (higher than FIN: {})",
packet.seq_nr(), self.fin_seq_nr);
return None;
}
match packet.get_type() {
SynPacket => { // Respond with an ACK and populate own fields
// Update socket information for new connections
self.ack_nr = packet.seq_nr();
self.seq_nr = random();
// Receiver id is sender id + 1, mirroring the initiator's pair
self.receiver_connection_id = packet.connection_id() + 1;
self.sender_connection_id = packet.connection_id();
self.state = SocketConnected;
Some(self.prepare_reply(&packet, StatePacket))
}
DataPacket => {
let mut reply = self.prepare_reply(&packet, StatePacket);
if self.ack_nr + 1 < packet.seq_nr() {
debug!("current ack_nr ({}) is behind received packet seq_nr ({})",
self.ack_nr, packet.seq_nr());
// Set SACK extension payload if the packet is not in order
let sack = self.build_selective_ack();
if sack.len() > 0 {
reply.set_sack(Some(sack));
}
}
Some(reply)
},
FinPacket => {
self.state = SocketFinReceived;
self.fin_seq_nr = packet.seq_nr();
// If all packets are received and handled
if self.pending_data.is_empty() &&
self.incoming_buffer.is_empty() &&
self.ack_nr == self.fin_seq_nr
{
self.state = SocketEndOfFile;
Some(self.prepare_reply(&packet, StatePacket))
} else {
// Delay EOF until the missing packets arrive
debug!("FIN received but there are missing packets");
None
}
}
StatePacket => {
// Count duplicate ACKs to detect packet loss (triple-ACK rule)
if packet.ack_nr() == self.last_acked {
self.duplicate_ack_count += 1;
} else {
self.last_acked = packet.ack_nr();
self.last_acked_timestamp = now_microseconds();
self.duplicate_ack_count = 1;
}
// Feed the delay estimators with this packet's timestamps
self.update_base_delay(packet.timestamp_microseconds());
self.update_current_delay(packet.timestamp_difference_microseconds());
// LEDBAT-style congestion window update: grow cwnd in proportion
// to how far the queuing delay sits below TARGET.
// NOTE(review): `queuing_delay` underflows (wraps) if the filtered
// current delay drops below the minimum base delay -- confirm.
// NOTE(review): `off_target` uses u32 integer division, so it is 0
// whenever 0 < target - queuing_delay < target and wraps when
// queuing_delay > target; verify against the intended fractional
// LEDBAT formula.
let bytes_newly_acked = packet.len();
let flightsize = self.curr_window;
let queuing_delay = self.filtered_current_delay() - self.min_base_delay();
let target = TARGET as u32;
let off_target: u32 = (target - queuing_delay) / target;
self.cwnd += GAIN * off_target as uint * bytes_newly_acked * MSS / self.cwnd;
// Clamp cwnd between MIN_CWND*MSS and flightsize + allowance
let max_allowed_cwnd = flightsize + ALLOWED_INCREASE * MSS;
self.cwnd = std::cmp::min(self.cwnd, max_allowed_cwnd);
self.cwnd = std::cmp::max(self.cwnd, MIN_CWND * MSS);
// NOTE(review): deriving an RTT from `target - off_target` looks
// suspicious; an RTT estimate would normally come from ACK timing
// -- verify this against the congestion-control design.
let rtt = (target - off_target) / 1000; // in milliseconds
self.update_congestion_timeout(rtt as int);
debug!("queuing_delay: {}", queuing_delay);
debug!("off_target: {}", off_target);
debug!("cwnd: {}", self.cwnd);
debug!("max_allowed_cwnd: {}", max_allowed_cwnd);
let mut packet_loss_detected: bool = !self.send_window.is_empty() &&
self.duplicate_ack_count == 3;
// Process extensions, if any
for extension in packet.extensions.iter() {
if extension.get_type() == SelectiveAckExtension {
let bits = extension.iter();
// If three or more packets are acknowledged past the implicit missing one,
// assume it was lost.
if bits.filter(|&bit| bit == 1).count() >= 3 {
self.resend_lost_packet(packet.ack_nr() + 1);
packet_loss_detected = true;
}
// Second pass: retransmit every hole below the highest packet
// still in the send window (SACK bit 0 = not received)
let bits = extension.iter();
for (idx, received) in bits.map(|bit| bit == 1).enumerate() {
// SACK bits start at ack_nr + 2 (ack_nr + 1 is implicit)
let seq_nr = packet.ack_nr() + 2 + idx as u16;
if received {
debug!("SACK: packet {} received", seq_nr);
} else if !self.send_window.is_empty() &&
seq_nr < self.send_window.last().unwrap().seq_nr()
{
debug!("SACK: packet {} lost", seq_nr);
self.resend_lost_packet(seq_nr);
packet_loss_detected = true;
} else {
break;
}
}
} else {
debug!("Unknown extension {}, ignoring", extension.get_type());
}
}
// Packet lost, halve the congestion window
if packet_loss_detected {
debug!("packet loss detected, halving congestion window");
self.cwnd = std::cmp::max(self.cwnd / 2, MIN_CWND * MSS);
debug!("cwnd: {}", self.cwnd);
}
// Three duplicate ACKs, must resend packets since `ack_nr + 1`
// TODO: checking if the send buffer isn't empty isn't a
// foolproof way to differentiate between triple-ACK and three
// keep alives spread in time
if !self.send_window.is_empty() && self.duplicate_ack_count == 3 {
for i in range(0, self.send_window.len()) {
let seq_nr = self.send_window[i].seq_nr();
if seq_nr <= packet.ack_nr() { continue; }
self.resend_lost_packet(seq_nr);
}
}
// Success, advance send window
self.advance_send_window();
// A FIN we sent was fully acknowledged: the connection is closed
if self.state == SocketFinSent && packet.ack_nr() == self.seq_nr {
self.state = SocketClosed;
}
None
},
ResetPacket => {
self.state = SocketResetReceived;
None
},
}
}
/// Insert a packet into the socket's buffer.
///
/// The packet is inserted in such a way that the buffer is
/// ordered ascendingly by their sequence number. This allows
/// storing packets that were received out of order.
///
/// Inserting a duplicate of a packet will replace the one in the buffer if
/// it's more recent (larger timestamp).
///
/// NOTE(review): the implementation below replaces a duplicate
/// unconditionally, without comparing timestamps -- either the claim
/// above or the code should be adjusted.
fn insert_into_buffer(&mut self, packet: UtpPacket) {
// Find the insertion point that keeps the buffer sorted by seq_nr
let mut i = 0;
for pkt in self.incoming_buffer.iter() {
if pkt.seq_nr() >= packet.seq_nr() {
break;
}
i += 1;
}
// Drop an existing packet with the same sequence number, if any
if !self.incoming_buffer.is_empty() && i < self.incoming_buffer.len() &&
self.incoming_buffer[i].seq_nr() == packet.seq_nr() {
self.incoming_buffer.remove(i);
}
self.incoming_buffer.insert(i, packet);
}
}
/// Stream interface for UtpSocket.
pub struct UtpStream {
// Underlying datagram-oriented uTP socket driving this stream
socket: UtpSocket,
}
impl UtpStream {
/// Create a uTP stream listening on the given address.
#[unstable]
pub fn bind(addr: SocketAddr) -> IoResult<UtpStream> {
let socket = UtpSocket::bind(addr);
match socket {
Ok(s) => Ok(UtpStream { socket: s }),
Err(e) => Err(e),
}
}
/// Open a uTP connection to a remote host by hostname or IP address.
#[unstable]
pub fn connect(dst: SocketAddr) -> IoResult<UtpStream> {
use std::io::net::ip::Ipv4Addr;
// Port 0 means the operating system gets to choose it
let my_addr = SocketAddr { ip: Ipv4Addr(0,0,0,0), port: 0 };
let socket = match UtpSocket::bind(my_addr) {
Ok(s) => s,
Err(e) => return Err(e),
};
match socket.connect(dst) {
Ok(socket) => Ok(UtpStream { socket: socket }),
Err(e) => Err(e),
}
}
/// Gracefully close connection to peer.
///
/// This method allows both peers to receive all packets still in
/// flight.
#[unstable]
pub fn close(&mut self) -> IoResult<()> {
self.socket.close()
}
}
impl Reader for UtpStream {
/// Read from the underlying uTP socket, discarding the source address.
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {
self.socket.recv_from(buf).map(|(nread, _src)| nread)
}
}
impl Writer for UtpStream {
// Delegate writes directly to the underlying uTP socket's send path
fn write(&mut self, buf: &[u8]) -> IoResult<()> {
self.socket.send_to(buf)
}
}
#[cfg(test)]
mod test {
use super::{UtpSocket, UtpStream};
use super::{BUF_SIZE};
use super::{SocketConnected, SocketNew, SocketClosed, SocketEndOfFile};
use std::rand::random;
use std::io::test::next_test_ip4;
use util::now_microseconds;
use packet::{UtpPacket, StatePacket, FinPacket, DataPacket, ResetPacket, SynPacket};
// Basic handshake over loopback: after connecting, the server's
// connection ids must mirror the client's and both sides must report
// SocketConnected.
#[test]
fn test_socket_ipv4() {
let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());
let client = iotry!(UtpSocket::bind(client_addr));
let mut server = iotry!(UtpSocket::bind(server_addr));
assert!(server.state == SocketNew);
assert!(client.state == SocketNew);
// Check proper difference in client's send connection id and receive connection id
assert_eq!(client.sender_connection_id, client.receiver_connection_id + 1);
spawn(proc() {
let client = iotry!(client.connect(server_addr));
assert!(client.state == SocketConnected);
assert_eq!(client.connected_to, server_addr);
drop(client);
});
let mut buf = [0u8, ..BUF_SIZE];
match server.recv_from(buf) {
e => println!("{}", e),
}
// After establishing a new connection, the server's ids are a mirror of the client's.
assert_eq!(server.receiver_connection_id, server.sender_connection_id + 1);
assert_eq!(server.connected_to, client_addr);
assert!(server.state == SocketConnected);
drop(server);
}
// Receiving after the peer closes: first recv after FIN yields EOF state,
// the next raises EndOfFile, and subsequent calls raise Closed.
#[test]
fn test_recvfrom_on_closed_socket() {
use std::io::{Closed, EndOfFile};
let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());
let client = iotry!(UtpSocket::bind(client_addr));
let mut server = iotry!(UtpSocket::bind(server_addr));
assert!(server.state == SocketNew);
assert!(client.state == SocketNew);
spawn(proc() {
let mut client = iotry!(client.connect(server_addr));
assert!(client.state == SocketConnected);
assert_eq!(client.close(), Ok(()));
drop(client);
});
// Make the server listen for incoming connections
let mut buf = [0u8, ..BUF_SIZE];
let _resp = server.recv_from(buf);
assert!(server.state == SocketConnected);
// Closing the connection is fine
match server.recv_from(buf) {
Err(e) => panic!("{}", e),
_ => {},
}
assert_eq!(server.state, SocketEndOfFile);
// Trying to listen on the socket after closing it raises an
// EOF error
match server.recv_from(buf) {
Err(e) => assert_eq!(e.kind, EndOfFile),
v => panic!("expected {}, got {}", EndOfFile, v),
}
assert_eq!(server.state, SocketClosed);
// Trying again raises a Closed error
match server.recv_from(buf) {
Err(e) => assert_eq!(e.kind, Closed),
v => panic!("expected {}, got {}", Closed, v),
}
drop(server);
}
// Sending on a socket the local side has closed must fail with Closed.
#[test]
fn test_sendto_on_closed_socket() {
use std::io::Closed;
let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());
let client = iotry!(UtpSocket::bind(client_addr));
let mut server = iotry!(UtpSocket::bind(server_addr));
assert!(server.state == SocketNew);
assert!(client.state == SocketNew);
spawn(proc() {
let client = iotry!(client.connect(server_addr));
assert!(client.state == SocketConnected);
let mut buf = [0u8, ..BUF_SIZE];
let mut client = client;
iotry!(client.recv_from(buf));
});
// Make the server listen for incoming connections
let mut buf = [0u8, ..BUF_SIZE];
let (_read, _src) = iotry!(server.recv_from(buf));
assert!(server.state == SocketConnected);
iotry!(server.close());
assert_eq!(server.state, SocketClosed);
// Trying to send to the socket after closing it raises an
// error
match server.send_to(buf) {
Err(e) => assert_eq!(e.kind, Closed),
v => panic!("expected {}, got {}", Closed, v),
}
drop(server);
}
// STATE (ACK-only) packets must not advance sequence numbers: after
// connect and close, the client's ack_nr equals the server's seq_nr
// and stays unchanged.
#[test]
fn test_acks_on_socket() {
let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());
let (tx, rx) = channel();
let client = iotry!(UtpSocket::bind(client_addr));
let server = iotry!(UtpSocket::bind(server_addr));
spawn(proc() {
// Make the server listen for incoming connections
let mut server = server;
let mut buf = [0u8, ..BUF_SIZE];
let _resp = server.recv_from(buf);
tx.send(server.seq_nr);
// Close the connection
iotry!(server.recv_from(buf));
drop(server);
});
let mut client = iotry!(client.connect(server_addr));
assert!(client.state == SocketConnected);
let sender_seq_nr = rx.recv();
let ack_nr = client.ack_nr;
assert!(ack_nr != 0);
assert!(ack_nr == sender_seq_nr);
assert_eq!(client.close(), Ok(()));
// The reply to both connect (SYN) and close (FIN) should be
// STATE packets, which don't increase the sequence number
// and, hence, the receiver's acknowledgement number.
assert!(client.ack_nr == ack_nr);
drop(client);
}
// Drives handle_packet() directly through a full connection lifecycle
// (SYN -> DATA -> FIN) and checks the reply types, connection ids and
// ack/seq number bookkeeping at each step.
#[test]
fn test_handle_packet() {
//fn test_connection_setup() {
let initial_connection_id: u16 = random();
let sender_connection_id = initial_connection_id + 1;
let server_addr = next_test_ip4();
let mut socket = iotry!(UtpSocket::bind(server_addr));
let mut packet = UtpPacket::new();
packet.set_wnd_size(BUF_SIZE as u32);
packet.set_type(SynPacket);
packet.set_connection_id(initial_connection_id);
// Do we have a response?
let response = socket.handle_packet(packet.clone());
assert!(response.is_some());
// Is is of the correct type?
let response = response.unwrap();
assert!(response.get_type() == StatePacket);
// Same connection id on both ends during connection establishment
assert!(response.connection_id() == packet.connection_id());
// Response acknowledges SYN
assert!(response.ack_nr() == packet.seq_nr());
// No payload?
assert!(response.payload.is_empty());
//}
// ---------------------------------
// fn test_connection_usage() {
let old_packet = packet;
let old_response = response;
let mut packet = UtpPacket::new();
packet.set_type(DataPacket);
packet.set_connection_id(sender_connection_id);
packet.set_seq_nr(old_packet.seq_nr() + 1);
packet.set_ack_nr(old_response.seq_nr());
let response = socket.handle_packet(packet.clone());
assert!(response.is_some());
let response = response.unwrap();
assert!(response.get_type() == StatePacket);
// Sender (i.e., who initated connection and sent SYN) has connection id
// equal to initial connection id + 1
// Receiver (i.e., who accepted connection) has connection id equal to
// initial connection id
assert!(response.connection_id() == initial_connection_id);
assert!(response.connection_id() == packet.connection_id() - 1);
// Previous packets should be ack'ed
assert!(response.ack_nr() == packet.seq_nr());
// Responses with no payload should not increase the sequence number
assert!(response.payload.is_empty());
assert!(response.seq_nr() == old_response.seq_nr());
// }
//fn test_connection_teardown() {
let old_packet = packet;
let old_response = response;
let mut packet = UtpPacket::new();
packet.set_type(FinPacket);
packet.set_connection_id(sender_connection_id);
packet.set_seq_nr(old_packet.seq_nr() + 1);
packet.set_ack_nr(old_response.seq_nr());
let response = socket.handle_packet(packet.clone());
assert!(response.is_some());
let response = response.unwrap();
assert!(response.get_type() == StatePacket);
// FIN packets have no payload but the sequence number shouldn't increase
assert!(packet.seq_nr() == old_packet.seq_nr() + 1);
// Nor should the ACK packet's sequence number
assert!(response.seq_nr() == old_response.seq_nr());
// FIN should be acknowledged
assert!(response.ack_nr() == packet.seq_nr());
//}
}
// Keep-alive STATE packets (including duplicates) must produce no reply.
#[test]
fn test_response_to_keepalive_ack() {
// Boilerplate test setup
let initial_connection_id: u16 = random();
let server_addr = next_test_ip4();
let mut socket = iotry!(UtpSocket::bind(server_addr));
// Establish connection
let mut packet = UtpPacket::new();
packet.set_wnd_size(BUF_SIZE as u32);
packet.set_type(SynPacket);
packet.set_connection_id(initial_connection_id);
let response = socket.handle_packet(packet.clone());
assert!(response.is_some());
let response = response.unwrap();
assert!(response.get_type() == StatePacket);
let old_packet = packet;
let old_response = response;
// Now, send a keepalive packet
let mut packet = UtpPacket::new();
packet.set_wnd_size(BUF_SIZE as u32);
packet.set_type(StatePacket);
packet.set_connection_id(initial_connection_id);
packet.set_seq_nr(old_packet.seq_nr() + 1);
packet.set_ack_nr(old_response.seq_nr());
let response = socket.handle_packet(packet.clone());
assert!(response.is_none());
// Send a second keepalive packet, identical to the previous one
let response = socket.handle_packet(packet.clone());
assert!(response.is_none());
}
// A non-SYN packet with an unknown connection id must be answered with
// a RESET acknowledging that packet's sequence number.
#[test]
fn test_response_to_wrong_connection_id() {
// Boilerplate test setup
let initial_connection_id: u16 = random();
let server_addr = next_test_ip4();
let mut socket = iotry!(UtpSocket::bind(server_addr));
// Establish connection
let mut packet = UtpPacket::new();
packet.set_wnd_size(BUF_SIZE as u32);
packet.set_type(SynPacket);
packet.set_connection_id(initial_connection_id);
let response = socket.handle_packet(packet.clone());
assert!(response.is_some());
assert!(response.unwrap().get_type() == StatePacket);
// Now, disrupt connection with a packet with an incorrect connection id
let new_connection_id = initial_connection_id * 2;
let mut packet = UtpPacket::new();
packet.set_wnd_size(BUF_SIZE as u32);
packet.set_type(StatePacket);
packet.set_connection_id(new_connection_id);
let response = socket.handle_packet(packet.clone());
assert!(response.is_some());
let response = response.unwrap();
assert!(response.get_type() == ResetPacket);
assert!(response.ack_nr() == packet.seq_nr());
}
// Smoke test: a UtpStream connect/close round trip with no payload.
#[test]
fn test_utp_stream() {
let server_addr = next_test_ip4();
let mut server = iotry!(UtpStream::bind(server_addr));
spawn(proc() {
let mut client = iotry!(UtpStream::connect(server_addr));
iotry!(client.close());
});
iotry!(server.read_to_end());
}
// Stream transfer of a payload that fits in a single packet.
#[test]
fn test_utp_stream_small_data() {
// Fits in a packet
const LEN: uint = 1024;
let data = Vec::from_fn(LEN, |idx| idx as u8);
assert_eq!(LEN, data.len());
let d = data.clone();
let server_addr = next_test_ip4();
let mut server = UtpStream::bind(server_addr);
spawn(proc() {
let mut client = iotry!(UtpStream::connect(server_addr));
iotry!(client.write(d.as_slice()));
iotry!(client.close());
});
let read = iotry!(server.read_to_end());
assert!(!read.is_empty());
assert_eq!(read.len(), data.len());
assert_eq!(read, data);
}
// Stream transfer of a payload spanning many packets.
#[test]
fn test_utp_stream_large_data() {
// Has to be sent over several packets
const LEN: uint = 1024 * 1024;
let data = Vec::from_fn(LEN, |idx| idx as u8);
assert_eq!(LEN, data.len());
let d = data.clone();
let server_addr = next_test_ip4();
let mut server = UtpStream::bind(server_addr);
spawn(proc() {
let mut client = iotry!(UtpStream::connect(server_addr));
iotry!(client.write(d.as_slice()));
iotry!(client.close());
});
let read = iotry!(server.read_to_end());
assert!(!read.is_empty());
assert_eq!(read.len(), data.len());
assert_eq!(read, data);
}
// Reading after the stream is exhausted and closed must fail with Closed.
#[test]
fn test_utp_stream_successive_reads() {
use std::io::Closed;
const LEN: uint = 1024;
let data: Vec<u8> = Vec::from_fn(LEN, |idx| idx as u8);
assert_eq!(LEN, data.len());
let d = data.clone();
let server_addr = next_test_ip4();
let mut server = UtpStream::bind(server_addr);
spawn(proc() {
let mut client = iotry!(UtpStream::connect(server_addr));
iotry!(client.write(d.as_slice()));
iotry!(client.close());
});
iotry!(server.read_to_end());
let mut buf = [0u8, ..4096];
match server.read(buf) {
Err(ref e) if e.kind == Closed => {},
_ => panic!("should have failed with Closed"),
};
}
// Out-of-order DATA handling via handle_packet: a packet arriving ahead
// of its predecessor must not be acknowledged until the gap is filled.
#[test]
fn test_unordered_packets() {
// Boilerplate test setup
let initial_connection_id: u16 = random();
let server_addr = next_test_ip4();
let mut socket = iotry!(UtpSocket::bind(server_addr));
// Establish connection
let mut packet = UtpPacket::new();
packet.set_wnd_size(BUF_SIZE as u32);
packet.set_type(SynPacket);
packet.set_connection_id(initial_connection_id);
let response = socket.handle_packet(packet.clone());
assert!(response.is_some());
let response = response.unwrap();
assert!(response.get_type() == StatePacket);
let old_packet = packet;
let old_response = response;
let mut window: Vec<UtpPacket> = Vec::new();
// Now, send a keepalive packet
let mut packet = UtpPacket::new();
packet.set_wnd_size(BUF_SIZE as u32);
packet.set_type(DataPacket);
packet.set_connection_id(initial_connection_id);
packet.set_seq_nr(old_packet.seq_nr() + 1);
packet.set_ack_nr(old_response.seq_nr());
packet.payload = vec!(1,2,3);
window.push(packet);
let mut packet = UtpPacket::new();
packet.set_wnd_size(BUF_SIZE as u32);
packet.set_type(DataPacket);
packet.set_connection_id(initial_connection_id);
packet.set_seq_nr(old_packet.seq_nr() + 2);
packet.set_ack_nr(old_response.seq_nr());
packet.payload = vec!(4,5,6);
window.push(packet);
// Send packets in reverse order
let response = socket.handle_packet(window[1].clone());
assert!(response.is_some());
let response = response.unwrap();
assert!(response.ack_nr() != window[1].seq_nr());
let response = socket.handle_packet(window[0].clone());
assert!(response.is_some());
}
// End-to-end out-of-order delivery over UDP: packets sent in reverse
// order must still be reassembled into the original byte sequence.
#[test]
fn test_socket_unordered_packets() {
let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());
let client = iotry!(UtpSocket::bind(client_addr));
let mut server = iotry!(UtpSocket::bind(server_addr));
assert!(server.state == SocketNew);
assert!(client.state == SocketNew);
// Check proper difference in client's send connection id and receive connection id
assert_eq!(client.sender_connection_id, client.receiver_connection_id + 1);
spawn(proc() {
let mut client = iotry!(client.connect(server_addr));
assert!(client.state == SocketConnected);
let mut s = client.socket;
let mut window: Vec<UtpPacket> = Vec::new();
// Build four 3-byte DATA packets followed by a FIN
for data in Vec::from_fn(12, |idx| idx as u8 + 1).as_slice().chunks(3) {
let mut packet = UtpPacket::new();
packet.set_wnd_size(BUF_SIZE as u32);
packet.set_type(DataPacket);
packet.set_connection_id(client.sender_connection_id);
packet.set_seq_nr(client.seq_nr);
packet.set_ack_nr(client.ack_nr);
packet.payload = data.to_vec();
window.push(packet.clone());
client.send_window.push(packet.clone());
client.seq_nr += 1;
}
let mut packet = UtpPacket::new();
packet.set_wnd_size(BUF_SIZE as u32);
packet.set_type(FinPacket);
packet.set_connection_id(client.sender_connection_id);
packet.set_seq_nr(client.seq_nr);
packet.set_ack_nr(client.ack_nr);
window.push(packet);
client.seq_nr += 1;
// Transmit the data packets in reverse order, then the FIN
iotry!(s.send_to(window[3].bytes().as_slice(), server_addr));
iotry!(s.send_to(window[2].bytes().as_slice(), server_addr));
iotry!(s.send_to(window[1].bytes().as_slice(), server_addr));
iotry!(s.send_to(window[0].bytes().as_slice(), server_addr));
iotry!(s.send_to(window[4].bytes().as_slice(), server_addr));
for _ in range(0u, 2) {
let mut buf = [0, ..BUF_SIZE];
iotry!(s.recv_from(buf));
}
});
let mut buf = [0u8, ..BUF_SIZE];
match server.recv_from(buf) {
e => println!("{}", e),
}
// After establishing a new connection, the server's ids are a mirror of the client's.
assert_eq!(server.receiver_connection_id, server.sender_connection_id + 1);
assert!(server.state == SocketConnected);
let mut stream = UtpStream { socket: server };
let expected: Vec<u8> = Vec::from_fn(12, |idx| idx as u8 + 1);
match stream.read_to_end() {
Ok(data) => {
assert_eq!(data.len(), expected.len());
assert_eq!(data, expected);
},
Err(e) => panic!("{}", e),
}
}
// A SYN must update the socket's ack_nr but never be stored in the
// incoming (reassembly) buffer.
#[test]
fn test_socket_should_not_buffer_syn_packets() {
use std::io::net::udp::UdpSocket;
let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());
let server = iotry!(UtpSocket::bind(server_addr));
let client = iotry!(UdpSocket::bind(client_addr));
// Raw encoding of a SYN packet captured off the wire
let test_syn_raw = [0x41, 0x00, 0x41, 0xa7, 0x00, 0x00, 0x00,
0x27, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x3a,
0xf1, 0x00, 0x00];
let test_syn_pkt = UtpPacket::decode(test_syn_raw);
let seq_nr = test_syn_pkt.seq_nr();
spawn(proc() {
let mut client = client;
iotry!(client.send_to(test_syn_raw, server_addr));
client.set_timeout(Some(10));
let mut buf = [0, ..BUF_SIZE];
let packet = match client.recv_from(buf) {
Ok((nread, _src)) => UtpPacket::decode(buf.slice_to(nread)),
Err(e) => panic!("{}", e),
};
assert_eq!(packet.ack_nr(), seq_nr);
drop(client);
});
let mut server = server;
let mut buf = [0, ..20];
iotry!(server.recv_from(buf));
assert!(server.ack_nr != 0);
assert_eq!(server.ack_nr, seq_nr);
assert!(server.incoming_buffer.is_empty());
}
// Fast retransmit: after three duplicate ACKs for the packet preceding
// a DATA packet, the sender must resend that same DATA packet.
#[test]
fn test_response_to_triple_ack() {
let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());
let mut server = iotry!(UtpSocket::bind(server_addr));
let client = iotry!(UtpSocket::bind(client_addr));
// Fits in a packet
const LEN: uint = 1024;
let data = Vec::from_fn(LEN, |idx| idx as u8);
let d = data.clone();
assert_eq!(LEN, data.len());
spawn(proc() {
let mut client = iotry!(client.connect(server_addr));
iotry!(client.send_to(d.as_slice()));
iotry!(client.close());
});
let mut buf = [0, ..BUF_SIZE];
// Expect SYN
iotry!(server.recv_from(buf));
// Receive data
let mut data_packet;
match server.socket.recv_from(buf) {
Ok((read, _src)) => {
data_packet = UtpPacket::decode(buf.slice_to(read));
assert!(data_packet.get_type() == DataPacket);
assert_eq!(data_packet.payload, data);
assert_eq!(data_packet.payload.len(), data.len());
},
Err(e) => panic!("{}", e),
}
let data_packet = data_packet;
// Send triple ACK
let mut packet = UtpPacket::new();
packet.set_wnd_size(BUF_SIZE as u32);
packet.set_type(StatePacket);
packet.set_seq_nr(server.seq_nr);
packet.set_ack_nr(data_packet.seq_nr() - 1);
packet.set_connection_id(server.sender_connection_id);
for _ in range(0u, 3) {
iotry!(server.socket.send_to(packet.bytes().as_slice(), client_addr));
}
// Receive data again and check that it's the same we reported as missing
match server.socket.recv_from(buf) {
Ok((0, _)) => panic!("Received 0 bytes from socket"),
Ok((read, _src)) => {
let packet = UtpPacket::decode(buf.slice_to(read));
assert_eq!(packet.get_type(), DataPacket);
assert_eq!(packet.seq_nr(), data_packet.seq_nr());
assert!(packet.payload == data_packet.payload);
let response = server.handle_packet(packet).unwrap();
iotry!(server.socket.send_to(response.bytes().as_slice(), server.connected_to));
},
Err(e) => panic!("{}", e),
}
// Receive close
iotry!(server.recv_from(buf));
}
// Timeout-driven retransmission: a dropped (deliberately discarded)
// packet must eventually be received again after the congestion
// timeout fires.
#[test]
fn test_socket_timeout_request() {
let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());
let client = iotry!(UtpSocket::bind(client_addr));
let mut server = iotry!(UtpSocket::bind(server_addr));
let len = 512;
let data = Vec::from_fn(len, |idx| idx as u8);
let d = data.clone();
assert!(server.state == SocketNew);
assert!(client.state == SocketNew);
// Check proper difference in client's send connection id and receive connection id
assert_eq!(client.sender_connection_id, client.receiver_connection_id + 1);
spawn(proc() {
let mut client = iotry!(client.connect(server_addr));
assert!(client.state == SocketConnected);
assert_eq!(client.connected_to, server_addr);
iotry!(client.send_to(d.as_slice()));
drop(client);
});
let mut buf = [0u8, ..BUF_SIZE];
match server.recv_from(buf) {
e => println!("{}", e),
}
// After establishing a new connection, the server's ids are a mirror of the client's.
assert_eq!(server.receiver_connection_id, server.sender_connection_id + 1);
assert_eq!(server.connected_to, client_addr);
assert!(server.state == SocketConnected);
// Purposefully read from UDP socket directly and discard it, in order
// to behave as if the packet was lost and thus trigger the timeout
// handling in the *next* call to `UtpSocket.recv_from`.
iotry!(server.socket.recv_from(buf));
// Set a much smaller than usual timeout, for quicker test completion
server.congestion_timeout = 50;
// Now wait for the previously discarded packet
loop {
match server.recv_from(buf) {
Ok((0, _)) => continue,
Ok(_) => break,
Err(e) => panic!("{}", e),
}
}
drop(server);
}
// insert_into_buffer must keep the incoming buffer sorted by sequence
// number and replace a duplicate entry with the newer copy.
#[test]
fn test_sorted_buffer_insertion() {
let server_addr = next_test_ip4();
let mut socket = iotry!(UtpSocket::bind(server_addr));
let mut packet = UtpPacket::new();
packet.set_seq_nr(1);
assert!(socket.incoming_buffer.is_empty());
socket.insert_into_buffer(packet.clone());
assert_eq!(socket.incoming_buffer.len(), 1);
packet.set_seq_nr(2);
packet.set_timestamp_microseconds(128);
socket.insert_into_buffer(packet.clone());
assert_eq!(socket.incoming_buffer.len(), 2);
assert_eq!(socket.incoming_buffer[1].seq_nr(), 2);
assert_eq!(socket.incoming_buffer[1].timestamp_microseconds(), 128);
packet.set_seq_nr(3);
packet.set_timestamp_microseconds(256);
socket.insert_into_buffer(packet.clone());
assert_eq!(socket.incoming_buffer.len(), 3);
assert_eq!(socket.incoming_buffer[2].seq_nr(), 3);
assert_eq!(socket.incoming_buffer[2].timestamp_microseconds(), 256);
// Replace a packet with a more recent version
packet.set_seq_nr(2);
packet.set_timestamp_microseconds(456);
socket.insert_into_buffer(packet.clone());
assert_eq!(socket.incoming_buffer.len(), 3);
assert_eq!(socket.incoming_buffer[1].seq_nr(), 2);
assert_eq!(socket.incoming_buffer[1].timestamp_microseconds(), 456);
}
// Two copies of the same DATA packet (differing only in timestamp) must
// be delivered to the application exactly once.
#[test]
fn test_duplicate_packet_handling() {
let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());
let client = iotry!(UtpSocket::bind(client_addr));
let mut server = iotry!(UtpSocket::bind(server_addr));
assert!(server.state == SocketNew);
assert!(client.state == SocketNew);
// Check proper difference in client's send connection id and receive connection id
assert_eq!(client.sender_connection_id, client.receiver_connection_id + 1);
spawn(proc() {
let mut client = iotry!(client.connect(server_addr));
assert!(client.state == SocketConnected);
let mut s = client.socket.clone();
let mut packet = UtpPacket::new();
packet.set_wnd_size(BUF_SIZE as u32);
packet.set_type(DataPacket);
packet.set_connection_id(client.sender_connection_id);
packet.set_seq_nr(client.seq_nr);
packet.set_ack_nr(client.ack_nr);
packet.payload = vec!(1,2,3);
// Send two copies of the packet, with different timestamps
for _ in range(0u, 2) {
packet.set_timestamp_microseconds(now_microseconds());
iotry!(s.send_to(packet.bytes().as_slice(), server_addr));
}
client.seq_nr += 1;
// Receive one ACK
for _ in range(0u, 1) {
let mut buf = [0, ..BUF_SIZE];
iotry!(s.recv_from(buf));
}
iotry!(client.close());
});
let mut buf = [0u8, ..BUF_SIZE];
match server.recv_from(buf) {
e => println!("{}", e),
}
// After establishing a new connection, the server's ids are a mirror of the client's.
assert_eq!(server.receiver_connection_id, server.sender_connection_id + 1);
assert!(server.state == SocketConnected);
let mut stream = UtpStream { socket: server };
let expected: Vec<u8> = vec!(1,2,3);
match stream.read_to_end() {
Ok(data) => {
println!("{}", data);
assert_eq!(data.len(), expected.len());
assert_eq!(data, expected);
},
Err(e) => panic!("{}", e),
}
}
// Sending a hand-crafted SACK must make the peer retransmit the packets
// the bitfield marks as missing, so the full payload is still received.
#[test]
fn test_selective_ack_response() {
let server_addr = next_test_ip4();
let len = 1024 * 10;
let data = Vec::from_fn(len, |idx| idx as u8);
let to_send = data.clone();
// Client
spawn(proc() {
let mut client = iotry!(UtpStream::connect(server_addr));
client.socket.congestion_timeout = 50;
// Stream.write
iotry!(client.write(to_send.as_slice()));
iotry!(client.close());
});
// Server
let mut server = iotry!(UtpSocket::bind(server_addr));
let mut buf = [0, ..BUF_SIZE];
// Connect
iotry!(server.recv_from(buf));
// Discard packets
iotry!(server.socket.recv_from(buf));
iotry!(server.socket.recv_from(buf));
iotry!(server.socket.recv_from(buf));
// Generate SACK
let mut packet = UtpPacket::new();
packet.set_seq_nr(server.seq_nr);
packet.set_ack_nr(server.ack_nr - 1);
packet.set_connection_id(server.sender_connection_id);
packet.set_timestamp_microseconds(now_microseconds());
packet.set_type(StatePacket);
packet.set_sack(Some(vec!(12, 0, 0, 0)));
// Send SACK
iotry!(server.socket.send_to(packet.bytes().as_slice(), server.connected_to.clone()));
// Expect to receive "missing" packets
let mut stream = UtpStream { socket: server };
let read = iotry!(stream.read_to_end());
assert!(!read.is_empty());
assert_eq!(read.len(), data.len());
assert_eq!(read, data);
}
// Loss recovery: the client transmits only the even-indexed chunks (the
// rest stay in its send window as "lost"); retransmission must still
// deliver the complete payload.
#[test]
fn test_correct_packet_loss() {
let (client_addr, server_addr) = (next_test_ip4(), next_test_ip4());
let mut server = iotry!(UtpStream::bind(server_addr));
let client = iotry!(UtpSocket::bind(client_addr));
let len = 1024 * 10;
let data = Vec::from_fn(len, |idx| idx as u8);
let to_send = data.clone();
spawn(proc() {
let mut client = iotry!(client.connect(server_addr));
// Send everything except the odd chunks
let chunks = to_send.as_slice().chunks(BUF_SIZE);
let dst = client.connected_to;
for (index, chunk) in chunks.enumerate() {
let mut packet = UtpPacket::new();
packet.set_seq_nr(client.seq_nr);
packet.set_ack_nr(client.ack_nr);
packet.set_connection_id(client.sender_connection_id);
packet.set_timestamp_microseconds(now_microseconds());
packet.payload = chunk.to_vec();
packet.set_type(DataPacket);
if index % 2 == 0 {
iotry!(client.socket.send_to(packet.bytes().as_slice(), dst));
}
client.send_window.push(packet);
client.seq_nr += 1;
}
iotry!(client.close());
});
let read = iotry!(server.read_to_end());
assert_eq!(read.len(), data.len());
assert_eq!(read, data);
}
#[test]
fn test_tolerance_to_small_buffers() {
// The receiver reads with a buffer (512 bytes) smaller than BUF_SIZE —
// presumably smaller than a full packet; verify no data is lost.
use std::io::EndOfFile;
let server_addr = next_test_ip4();
let mut server = iotry!(UtpSocket::bind(server_addr));
let len = 1024;
let data = Vec::from_fn(len, |idx| idx as u8);
let to_send = data.clone();
spawn(proc() {
let mut client = iotry!(UtpStream::connect(server_addr));
iotry!(client.write(to_send.as_slice()));
iotry!(client.close());
});
let mut read = Vec::new();
while server.state != SocketClosed {
let mut small_buffer = [0, ..512];
match server.recv_from(small_buffer) {
// A zero-length read carries no payload (e.g. pure control traffic).
Ok((0, _src)) => (),
Ok((len, _src)) => read.push_all(small_buffer.slice_to(len)),
// EndOfFile marks an orderly connection shutdown, not a failure.
Err(ref e) if e.kind == EndOfFile => break,
Err(e) => panic!("{}", e),
}
}
assert_eq!(read.len(), data.len());
assert_eq!(read, data);
}
#[test]
fn test_sequence_number_rollover() {
// Starts the client's 16-bit sequence number near u16::MAX so that sending
// the payload forces it to wrap around; the transfer must still complete.
let (server_addr, client_addr) = (next_test_ip4(), next_test_ip4());
let mut server = UtpStream::bind(server_addr);
let len = BUF_SIZE * 4;
let data = Vec::from_fn(len, |idx| idx as u8);
let to_send = data.clone();
spawn(proc() {
let mut socket = iotry!(UtpSocket::bind(client_addr));
// Advance socket's sequence number
socket.seq_nr = ::std::u16::MAX - (to_send.len() / (BUF_SIZE * 2)) as u16;
let socket = iotry!(socket.connect(server_addr));
let mut client = UtpStream { socket: socket };
// Send enough data to rollover
iotry!(client.write(to_send.as_slice()));
// Check that the sequence number did rollover
assert!(client.socket.seq_nr < 50);
// Close connection
iotry!(client.close());
});
let received = iotry!(server.read_to_end());
assert_eq!(received.len(), data.len());
assert_eq!(received, data);
}
}
|
#![feature(collections)]
#![feature(convert)]
use std::borrow::Cow;
use std::borrow::Cow::{Borrowed};
/// Compiles a Sass source string into CSS in the requested output `style`.
///
/// Supported styles: "nested", "compressed", "expanded", "compact", plus
/// "debug", which dumps the raw token stream instead of CSS. Any other
/// style name returns an `Err` with a usage message.
pub fn compile(sass: &str, style: &str) -> Result<String, &'static str> {
    let mut st = SassTokenizer::new(sass);
    match style {
        "nested" => Ok(nested_output(&mut st)),
        "compressed" => Ok(compressed_output(&mut st)),
        "expanded" => Ok(expanded_output(&mut st)),
        "compact" => Ok(compact_output(&mut st)),
        "debug" => Ok(debug_output(&mut st)),
        // Fixed the stray ":." typo in the original error message.
        _ => Err("Unknown style. Please specify one of nested, compressed, expanded, or compact."),
    }
}
/// Formats the token stream in Sass's "nested" output style.
pub fn nested_output(tokenizer: &mut SassTokenizer) -> String {
let mut output = String::from_str("");
// Seed `last` with an End event so the very first token formats as
// rule-initial.
let mut last = Event::End(Rule::SassRule);
while let Some(token) = tokenizer.next() {
let print_token = match token.clone() {
// NOTE: `continue` also skips the `last = token` update below, so Start
// events (and collapsed End events) never become `last`.
Event::Start(_) => continue,
Event::Selector(name) => format!("{} ", name),
Event::Property(name, value) => {
match last {
// First property after a selector opens the rule's brace.
Event::Selector(_) => format!("{{\n {}: {};", name, value),
_ => format!("\n {}: {};", name, value),
}
},
Event::End(_) => {
match last {
// Consecutive End events emit only one closing brace.
Event::End(_) => continue,
_ => format!(" }}\n"),
}
},
};
output.push_str(print_token.as_str());
last = token;
}
output
}
/// Formats the token stream in Sass's "compressed" (minified) output style:
/// no newlines or spaces beyond those required between selectors.
pub fn compressed_output(tokenizer: &mut SassTokenizer) -> String {
let mut output = String::from_str("");
// Seed `last` with an End event so the first token formats as rule-initial.
let mut last = Event::End(Rule::SassRule);
while let Some(token) = tokenizer.next() {
let print_token = match token.clone() {
// `continue` skips the `last = token` update below.
Event::Start(_) => continue,
Event::Selector(name) => {
match last {
// Subsequent selectors are space-separated.
Event::Selector(_) => format!(" {}", name),
_ => format!("{}", name),
}
},
Event::Property(name, value) => {
match last {
// First property after a selector opens the brace.
Event::Selector(_) => format!("{{{}:{}", name, value),
_ => format!("{}:{}", name, value),
}
},
Event::End(_) => {
match last {
// Consecutive End events emit only one closing brace.
Event::End(_) => continue,
_ => format!("}}"),
}
},
};
output.push_str(print_token.as_str());
last = token;
}
output
}
/// Formats the token stream in Sass's "expanded" output style: one property
/// per line, closing brace on its own line.
pub fn expanded_output(tokenizer: &mut SassTokenizer) -> String {
let mut output = String::from_str("");
// Seed `last` with an End event so the first token formats as rule-initial.
let mut last = Event::End(Rule::SassRule);
while let Some(token) = tokenizer.next() {
let print_token = match token.clone() {
// `continue` skips the `last = token` update below.
Event::Start(_) => continue,
Event::Selector(name) => {
match last {
Event::Selector(_) => format!(" {}", name),
_ => format!("{}", name),
}
},
Event::Property(name, value) => {
match last {
// First property after a selector opens the brace.
Event::Selector(_) => format!(" {{\n {}: {};", name, value),
_ => format!("\n {}: {};", name, value),
}
},
Event::End(_) => {
match last {
// Consecutive End events emit only one closing brace.
Event::End(_) => continue,
_ => format!("\n}}"),
}
},
};
output.push_str(print_token.as_str());
last = token;
}
output
}
/// Formats the token stream in Sass's "compact" output style: each rule on a
/// single line with space-separated properties.
pub fn compact_output(tokenizer: &mut SassTokenizer) -> String {
let mut output = String::from_str("");
// Seed `last` with an End event so the first token formats as rule-initial.
let mut last = Event::End(Rule::SassRule);
while let Some(token) = tokenizer.next() {
let print_token = match token.clone() {
// `continue` skips the `last = token` update below.
Event::Start(_) => continue,
Event::Selector(name) => {
match last {
Event::Selector(_) => format!(" {}", name),
_ => format!("{}", name),
}
},
Event::Property(name, value) => {
match last {
// First property after a selector opens the brace.
Event::Selector(_) => format!(" {{ {}: {};", name, value),
_ => format!(" {}: {};", name, value),
}
},
Event::End(_) => {
match last {
// Consecutive End events emit only one closing brace.
Event::End(_) => continue,
_ => format!(" }}"),
}
},
};
output.push_str(print_token.as_str());
last = token;
}
output
}
pub fn debug_output(tokenizer: &mut SassTokenizer) -> String {
let mut output = String::from_str("");
while let Some(token) = tokenizer.next() {
output.push_str(format!("{:?}\n", token).as_str());
}
output
}
/// Parser state: what kind of construct the tokenizer is currently scanning.
#[derive(PartialEq, Debug)]
pub enum State {
StartRule,
InSelectors,
InProperties,
}
/// Kind of rule being tokenized; currently only plain Sass rules exist.
#[derive(Debug,Clone)]
pub enum Rule {
SassRule,
}
/// A single tokenization event. Selector and Property borrow slices of the
/// input where possible (hence `Cow`).
#[derive(Debug,Clone)]
pub enum Event<'a> {
Start(Rule),
End(Rule),
Selector(Cow<'a, str>),
Property(Cow<'a, str>, Cow<'a, str>),
}
/// Streaming tokenizer over a Sass source string; yields `Event`s via the
/// `Iterator` impl.
#[derive(Debug)]
pub struct SassTokenizer<'a> {
// Full input text; events borrow from it.
sass: &'a str,
// Byte offset of the next unconsumed input.
offset: usize,
// Open (not-yet-ended) rules, innermost last.
stack: Vec<Rule>,
// Current scanning state.
state: State,
}
impl<'a> SassTokenizer<'a> {
    /// Creates a tokenizer positioned at the start of `sass`.
    pub fn new(sass: &'a str) -> SassTokenizer<'a> {
        SassTokenizer {
            sass: sass,
            offset: 0,
            stack: Vec::new(),
            state: State::StartRule,
        }
    }

    /// Begins tokenizing a rule. Returns `None` at end of input, `End` when
    /// the next byte closes a rule, otherwise pushes a new rule, switches to
    /// selector parsing, and emits `Start`.
    pub fn start_rule(&mut self) -> Option<Event<'a>> {
        self.skip_leading_whitespace();
        if self.offset == self.sass.len() {
            return None
        }
        let c = self.sass.as_bytes()[self.offset];
        if c == b'}' {
            self.offset += 1;
            return Some(self.end())
        }
        self.state = State::InSelectors;
        self.stack.push(Rule::SassRule);
        Some(Event::Start(Rule::SassRule))
    }

    /// Pops the innermost rule and emits its `End` event.
    ///
    /// Fixed: the original `unwrap()` panicked on unbalanced input such as a
    /// stray leading `}`; fall back to `Rule::SassRule` instead of crashing.
    fn end(&mut self) -> Event<'a> {
        let rule = self.stack.pop().unwrap_or(Rule::SassRule);
        self.state = State::StartRule;
        Event::End(rule)
    }

    /// Returns how many leading bytes of `data` satisfy `f` (all of
    /// `data.len()` when every byte matches).
    fn scan_while<F>(&mut self, data: &str, f: F) -> usize
        where F: Fn(u8) -> bool {
        match data.as_bytes().iter().position(|&c| !f(c)) {
            Some(i) => i,
            None => data.len()
        }
    }

    /// Advances `offset` past any ASCII whitespace.
    fn skip_leading_whitespace(&mut self) {
        let i = self.offset;
        self.offset += self.scan_while(&self.sass[i..self.sass.len()], is_ascii_whitespace);
    }

    /// Scans one `name: value;` pair, a closing `}`, or the `{` of a nested
    /// rule. Falls back to an empty `Property` (consuming the rest of the
    /// input) when nothing parseable remains.
    pub fn next_property(&mut self) -> Event<'a> {
        self.skip_leading_whitespace();
        let bytes = self.sass.as_bytes();
        let limit = self.sass.len();
        // Fixed: the original indexed `bytes[offset]` without a bounds check
        // and panicked when only whitespace remained (e.g. input "a{ ").
        if self.offset >= limit {
            return Event::Property(Borrowed(""), Borrowed(""))
        }
        let name_beginning = self.offset;
        let mut i = name_beginning;
        let c = bytes[i];
        if c == b'}' {
            self.offset += 1;
            return self.end()
        }
        while i < limit {
            // The property name runs until ':' — unless a '{' shows up first,
            // which means we are actually looking at a nested rule.
            match bytes[i..limit].iter().position(|&c| c == b':' || c == b'{') {
                Some(pos) => { i += pos; },
                None => { break; },
            }
            // Inefficient since we already skipped the whitespace and we'll have to
            // do it again but oh well
            let c = bytes[i];
            if c == b'{' {
                self.state = State::InSelectors;
                self.stack.push(Rule::SassRule);
                return Event::Start(Rule::SassRule)
            }
            let name_end = i;
            i += 1;
            self.offset = i;
            self.skip_leading_whitespace();
            let value_beginning = self.offset;
            i = value_beginning;
            while i < limit {
                // The value runs until the terminating ';'.
                match bytes[i..limit].iter().position(|&c| c == b';') {
                    Some(pos) => { i += pos; },
                    None => { i = limit; break; },
                }
                let value_end = i;
                self.offset = i + 1;
                self.skip_leading_whitespace();
                return Event::Property(
                    Borrowed(&self.sass[name_beginning..name_end]),
                    Borrowed(&self.sass[value_beginning..value_end])
                )
            }
        }
        // Nothing parseable; consume the remaining input.
        self.offset = self.sass.len();
        Event::Property(Borrowed(""), Borrowed(""))
    }

    /// Scans one selector, terminated by ',' (more selectors follow) or '{'
    /// (properties follow). Trailing whitespace is trimmed off the name.
    pub fn next_selector(&mut self) -> Event<'a> {
        let bytes = self.sass.as_bytes();
        let beginning = self.offset;
        let mut i = beginning;
        let limit = self.sass.len();
        while i < limit {
            match bytes[i..limit].iter().position(|&c| c == b',' || c == b'{') {
                Some(pos) => { i += pos; },
                None => { i = limit; break; },
            }
            let c = bytes[i];
            if c == b',' || c == b'{' {
                let n = scan_trailing_whitespace(&self.sass[beginning..i]);
                let end = i - n;
                if end > beginning {
                    if c == b'{' {
                        // '{' hands control to property parsing.
                        self.state = State::InProperties;
                    }
                    self.offset = i + 1;
                    return Event::Selector(Borrowed(&self.sass[beginning..end]));
                }
            }
            self.offset = i;
            if i > beginning {
                return Event::Selector(Borrowed(&self.sass[beginning..i]))
            }
            i += 1;
        }
        if i > beginning {
            self.offset = i;
            Event::Selector(Borrowed(&self.sass[beginning..i]))
        } else {
            // Empty selector: treat as the end of the enclosing rule.
            self.end()
        }
    }
}
impl<'a> Iterator for SassTokenizer<'a> {
type Item = Event<'a>;
// Dispatches to the scanner matching the current state; `start_rule` may
// return `None` mid-input (only whitespace left), which ends iteration.
fn next(&mut self) -> Option<Event<'a>> {
if self.offset < self.sass.len() {
match self.state {
State::StartRule => {
let ret = self.start_rule();
if ret.is_some() {
return ret
}
},
State::InSelectors => return Some(self.next_selector()),
State::InProperties => return Some(self.next_property()),
}
}
None
}
}
/// Returns the character whose UTF-8 encoding starts at byte index `byte`.
///
/// Panics (inherited from slice indexing / `unwrap`) if `byte` is out of
/// bounds, not on a character boundary, or at the very end of `s`.
pub fn char_at(s: &str, byte: usize) -> char {
    let tail = &s[byte..];
    tail.chars().next().unwrap()
}
/// True for any ASCII whitespace byte, newlines included:
/// `\n`, `\r`, `\t`, vertical tab (0x0b), form feed (0x0c), and space.
pub fn is_ascii_whitespace(c: u8) -> bool {
    match c {
        b'\n' | b'\r' | b'\t' | 0x0b | 0x0c | b' ' => true,
        _ => false,
    }
}
/// True for ASCII whitespace excluding `\n`/`\r`: tab, vertical tab (0x0b),
/// form feed (0x0c), and space.
pub fn is_ascii_whitespace_no_nl(c: u8) -> bool {
    matches!(c, b'\t' | 0x0b | 0x0c | b' ')
}
// unusual among "scan" functions in that it scans from the _back_ of the string
// TODO: should also scan unicode whitespace?
/// Counts trailing non-newline ASCII whitespace bytes (tab, VT, FF, space);
/// returns `data.len()` when the whole string is such whitespace.
pub fn scan_trailing_whitespace(data: &str) -> usize {
    data.as_bytes()
        .iter()
        .rev()
        .take_while(|&&c| c == b'\t' || c == 0x0b || c == 0x0c || c == b' ')
        .count()
}
Don't panic on some inputs that are not handled yet, so that all specs run
#![feature(collections)]
#![feature(convert)]
use std::borrow::Cow;
use std::borrow::Cow::{Borrowed};
/// Compiles a Sass source string into CSS in the requested output `style`.
///
/// Supported styles: "nested", "compressed", "expanded", "compact", plus
/// "debug", which dumps the raw token stream instead of CSS. Any other
/// style name returns an `Err` with a usage message.
pub fn compile(sass: &str, style: &str) -> Result<String, &'static str> {
    let mut st = SassTokenizer::new(sass);
    match style {
        "nested" => Ok(nested_output(&mut st)),
        "compressed" => Ok(compressed_output(&mut st)),
        "expanded" => Ok(expanded_output(&mut st)),
        "compact" => Ok(compact_output(&mut st)),
        "debug" => Ok(debug_output(&mut st)),
        // Fixed the stray ":." typo in the original error message.
        _ => Err("Unknown style. Please specify one of nested, compressed, expanded, or compact."),
    }
}
/// Formats the token stream in Sass's "nested" output style.
pub fn nested_output(tokenizer: &mut SassTokenizer) -> String {
let mut output = String::from_str("")
let mut last = Event::End(Rule::SassRule);
while let Some(token) = tokenizer.next() {
let print_token = match token.clone() {
Event::Start(_) => continue,
Event::Selector(name) => format!("{} ", name),
Event::Property(name, value) => {
match last {
Event::Selector(_) => format!("{{\n {}: {};", name, value),
_ => format!("\n {}: {};", name, value),
}
},
Event::End(_) => {
match last {
Event::End(_) => continue,
_ => format!(" }}\n"),
}
},
};
output.push_str(print_token.as_str());
last = token;
}
output
}
/// Formats the token stream in Sass's "compressed" (minified) output style:
/// no newlines or spaces beyond those required between selectors.
pub fn compressed_output(tokenizer: &mut SassTokenizer) -> String {
let mut output = String::from_str("");
// Seed `last` with an End event so the first token formats as rule-initial.
let mut last = Event::End(Rule::SassRule);
while let Some(token) = tokenizer.next() {
let print_token = match token.clone() {
// `continue` skips the `last = token` update below.
Event::Start(_) => continue,
Event::Selector(name) => {
match last {
// Subsequent selectors are space-separated.
Event::Selector(_) => format!(" {}", name),
_ => format!("{}", name),
}
},
Event::Property(name, value) => {
match last {
// First property after a selector opens the brace.
Event::Selector(_) => format!("{{{}:{}", name, value),
_ => format!("{}:{}", name, value),
}
},
Event::End(_) => {
match last {
// Consecutive End events emit only one closing brace.
Event::End(_) => continue,
_ => format!("}}"),
}
},
};
output.push_str(print_token.as_str());
last = token;
}
output
}
/// Formats the token stream in Sass's "expanded" output style: one property
/// per line, closing brace on its own line.
pub fn expanded_output(tokenizer: &mut SassTokenizer) -> String {
let mut output = String::from_str("");
// Seed `last` with an End event so the first token formats as rule-initial.
let mut last = Event::End(Rule::SassRule);
while let Some(token) = tokenizer.next() {
let print_token = match token.clone() {
// `continue` skips the `last = token` update below.
Event::Start(_) => continue,
Event::Selector(name) => {
match last {
Event::Selector(_) => format!(" {}", name),
_ => format!("{}", name),
}
},
Event::Property(name, value) => {
match last {
// First property after a selector opens the brace.
Event::Selector(_) => format!(" {{\n {}: {};", name, value),
_ => format!("\n {}: {};", name, value),
}
},
Event::End(_) => {
match last {
// Consecutive End events emit only one closing brace.
Event::End(_) => continue,
_ => format!("\n}}"),
}
},
};
output.push_str(print_token.as_str());
last = token;
}
output
}
/// Formats the token stream in Sass's "compact" output style: each rule on a
/// single line with space-separated properties.
pub fn compact_output(tokenizer: &mut SassTokenizer) -> String {
let mut output = String::from_str("");
// Seed `last` with an End event so the first token formats as rule-initial.
let mut last = Event::End(Rule::SassRule);
while let Some(token) = tokenizer.next() {
let print_token = match token.clone() {
// `continue` skips the `last = token` update below.
Event::Start(_) => continue,
Event::Selector(name) => {
match last {
Event::Selector(_) => format!(" {}", name),
_ => format!("{}", name),
}
},
Event::Property(name, value) => {
match last {
// First property after a selector opens the brace.
Event::Selector(_) => format!(" {{ {}: {};", name, value),
_ => format!(" {}: {};", name, value),
}
},
Event::End(_) => {
match last {
// Consecutive End events emit only one closing brace.
Event::End(_) => continue,
_ => format!(" }}"),
}
},
};
output.push_str(print_token.as_str());
last = token;
}
output
}
pub fn debug_output(tokenizer: &mut SassTokenizer) -> String {
let mut output = String::from_str("");
while let Some(token) = tokenizer.next() {
output.push_str(format!("{:?}\n", token).as_str());
}
output
}
/// Parser state: what kind of construct the tokenizer is currently scanning.
#[derive(PartialEq, Debug)]
pub enum State {
StartRule,
InSelectors,
InProperties,
}
/// Kind of rule being tokenized; currently only plain Sass rules exist.
#[derive(Debug,Clone)]
pub enum Rule {
SassRule,
}
/// A single tokenization event. Selector and Property borrow slices of the
/// input where possible (hence `Cow`).
#[derive(Debug,Clone)]
pub enum Event<'a> {
Start(Rule),
End(Rule),
Selector(Cow<'a, str>),
Property(Cow<'a, str>, Cow<'a, str>),
}
/// Streaming tokenizer over a Sass source string; yields `Event`s via the
/// `Iterator` impl.
#[derive(Debug)]
pub struct SassTokenizer<'a> {
// Full input text; events borrow from it.
sass: &'a str,
// Byte offset of the next unconsumed input.
offset: usize,
// Open (not-yet-ended) rules, innermost last.
stack: Vec<Rule>,
// Current scanning state.
state: State,
}
impl<'a> SassTokenizer<'a> {
    /// Creates a tokenizer positioned at the start of `sass`.
    pub fn new(sass: &'a str) -> SassTokenizer<'a> {
        SassTokenizer {
            sass: sass,
            offset: 0,
            stack: Vec::new(),
            state: State::StartRule,
        }
    }

    /// Begins tokenizing a rule. Returns `None` at end of input, `End` when
    /// the next byte closes a rule, otherwise pushes a new rule, switches to
    /// selector parsing, and emits `Start`.
    pub fn start_rule(&mut self) -> Option<Event<'a>> {
        self.skip_leading_whitespace();
        if self.offset == self.sass.len() {
            return None
        }
        let c = self.sass.as_bytes()[self.offset];
        if c == b'}' {
            self.offset += 1;
            return Some(self.end())
        }
        self.state = State::InSelectors;
        self.stack.push(Rule::SassRule);
        Some(Event::Start(Rule::SassRule))
    }

    /// Pops the innermost rule and emits its `End` event; recovers from an
    /// unbalanced `}` (empty stack) by reporting a default `SassRule` end.
    fn end(&mut self) -> Event<'a> {
        let rule = match self.stack.pop() {
            Some(r) => r,
            None => {
                // Fixed: the original early return skipped the state reset,
                // leaving the state machine inconsistent on unbalanced input.
                self.state = State::StartRule;
                println!("Unexpected empty stack!");
                return Event::End(Rule::SassRule)
            },
        };
        self.state = State::StartRule;
        Event::End(rule)
    }

    /// Returns how many leading bytes of `data` satisfy `f` (all of
    /// `data.len()` when every byte matches).
    fn scan_while<F>(&mut self, data: &str, f: F) -> usize
        where F: Fn(u8) -> bool {
        match data.as_bytes().iter().position(|&c| !f(c)) {
            Some(i) => i,
            None => data.len()
        }
    }

    /// Advances `offset` past any ASCII whitespace.
    fn skip_leading_whitespace(&mut self) {
        let i = self.offset;
        self.offset += self.scan_while(&self.sass[i..self.sass.len()], is_ascii_whitespace);
    }

    /// Scans one `name: value;` pair, a closing `}`, or the `{` of a nested
    /// rule. Falls back to an empty `Property` (consuming the rest of the
    /// input) when nothing parseable remains.
    pub fn next_property(&mut self) -> Event<'a> {
        self.skip_leading_whitespace();
        let bytes = self.sass.as_bytes();
        let limit = self.sass.len();
        // Fixed: the original indexed `bytes[offset]` without a bounds check
        // and panicked when only whitespace remained (e.g. input "a{ ").
        if self.offset >= limit {
            return Event::Property(Borrowed(""), Borrowed(""))
        }
        let name_beginning = self.offset;
        let mut i = name_beginning;
        let c = bytes[i];
        if c == b'}' {
            self.offset += 1;
            return self.end()
        }
        while i < limit {
            // The property name runs until ':' — unless a '{' shows up first,
            // which means we are actually looking at a nested rule.
            match bytes[i..limit].iter().position(|&c| c == b':' || c == b'{') {
                Some(pos) => { i += pos; },
                None => { break; },
            }
            // Inefficient since we already skipped the whitespace and we'll have to
            // do it again but oh well
            let c = bytes[i];
            if c == b'{' {
                self.state = State::InSelectors;
                self.stack.push(Rule::SassRule);
                return Event::Start(Rule::SassRule)
            }
            let name_end = i;
            i += 1;
            self.offset = i;
            self.skip_leading_whitespace();
            let value_beginning = self.offset;
            i = value_beginning;
            while i < limit {
                // The value runs until the terminating ';'.
                match bytes[i..limit].iter().position(|&c| c == b';') {
                    Some(pos) => { i += pos; },
                    None => { i = limit; break; },
                }
                let value_end = i;
                self.offset = i + 1;
                self.skip_leading_whitespace();
                return Event::Property(
                    Borrowed(&self.sass[name_beginning..name_end]),
                    Borrowed(&self.sass[value_beginning..value_end])
                )
            }
        }
        // Nothing parseable; consume the remaining input.
        self.offset = self.sass.len();
        Event::Property(Borrowed(""), Borrowed(""))
    }

    /// Scans one selector, terminated by ',' (more selectors follow) or '{'
    /// (properties follow). Trailing whitespace is trimmed off the name.
    pub fn next_selector(&mut self) -> Event<'a> {
        let bytes = self.sass.as_bytes();
        let beginning = self.offset;
        let mut i = beginning;
        let limit = self.sass.len();
        while i < limit {
            match bytes[i..limit].iter().position(|&c| c == b',' || c == b'{') {
                Some(pos) => { i += pos; },
                None => { i = limit; break; },
            }
            let c = bytes[i];
            if c == b',' || c == b'{' {
                let n = scan_trailing_whitespace(&self.sass[beginning..i]);
                let end = i - n;
                if end > beginning {
                    if c == b'{' {
                        // '{' hands control to property parsing.
                        self.state = State::InProperties;
                    }
                    self.offset = i + 1;
                    return Event::Selector(Borrowed(&self.sass[beginning..end]));
                }
            }
            self.offset = i;
            if i > beginning {
                return Event::Selector(Borrowed(&self.sass[beginning..i]))
            }
            i += 1;
        }
        if i > beginning {
            self.offset = i;
            Event::Selector(Borrowed(&self.sass[beginning..i]))
        } else {
            // Empty selector: treat as the end of the enclosing rule.
            self.end()
        }
    }
}
impl<'a> Iterator for SassTokenizer<'a> {
type Item = Event<'a>;
// Dispatches to the scanner matching the current state; `start_rule` may
// return `None` mid-input (only whitespace left), which ends iteration.
fn next(&mut self) -> Option<Event<'a>> {
if self.offset < self.sass.len() {
match self.state {
State::StartRule => {
let ret = self.start_rule();
if ret.is_some() {
return ret
}
},
State::InSelectors => return Some(self.next_selector()),
State::InProperties => return Some(self.next_property()),
}
}
None
}
}
/// Returns the character whose UTF-8 encoding starts at byte index `byte`.
///
/// Panics (inherited from slice indexing / `unwrap`) if `byte` is out of
/// bounds, not on a character boundary, or at the very end of `s`.
pub fn char_at(s: &str, byte: usize) -> char {
    let tail = &s[byte..];
    tail.chars().next().unwrap()
}
/// True for any ASCII whitespace byte, newlines included:
/// `\n`, `\r`, `\t`, vertical tab (0x0b), form feed (0x0c), and space.
pub fn is_ascii_whitespace(c: u8) -> bool {
    match c {
        b'\n' | b'\r' | b'\t' | 0x0b | 0x0c | b' ' => true,
        _ => false,
    }
}
/// True for ASCII whitespace excluding `\n`/`\r`: tab, vertical tab (0x0b),
/// form feed (0x0c), and space.
pub fn is_ascii_whitespace_no_nl(c: u8) -> bool {
    matches!(c, b'\t' | 0x0b | 0x0c | b' ')
}
// unusual among "scan" functions in that it scans from the _back_ of the string
// TODO: should also scan unicode whitespace?
/// Counts trailing non-newline ASCII whitespace bytes (tab, VT, FF, space);
/// returns `data.len()` when the whole string is such whitespace.
pub fn scan_trailing_whitespace(data: &str) -> usize {
    data.as_bytes()
        .iter()
        .rev()
        .take_while(|&&c| c == b'\t' || c == 0x0b || c == 0x0c || c == b' ')
        .count()
}
|
//! disque-rs is a rust implementation of a Disque client library.
//! It uses redis-rs to handle the connection and low level protocol.
//!
//! The crate is called `disque` and you can depend on it via cargo:
//!
//! ```ini
//! [dependencies.disque]
//! version = "*"
//! ```
//!
//! ## Connection Parameters
//!
//! disque-rs knows different ways to define where a connection should
//! go. The parameter to `Disque::open` needs to implement the
//! `IntoConnectionInfo` trait of which there are three implementations:
//!
//! * string slices in `redis://` URL format.
//! * URL objects from the redis-url crate.
//! * `ConnectionInfo` objects.
//!
//! The URL format is `redis://[:<passwd>@]<hostname>[:port][/<db>]`
//!
//! Notice the scheme is actually "redis" because it uses the Redis protocol.
//! By default, it will use port 6379, although Disque uses 7711.
//!
//! ## Unix Sockets
//!
//! For unix socket support, install `redis` with the feature "unix_socket".
//!
//! ## Command reference
//!
//! The commands are a direct implementation of Disque commands. To read a
//! reference about their meaning, go to https://github.com/antirez/disque
#![crate_name = "disque"]
#![crate_type = "lib"]
extern crate redis;
use std::collections::HashMap;
use std::time::Duration;
use redis::{Connection, RedisError, cmd, Value, ErrorKind, FromRedisValue,
IntoConnectionInfo, Iter, RedisResult, InfoDict};
/// Converts a `Duration` to whole milliseconds (sub-millisecond remainder is
/// truncated).
///
/// Fixed: the original added `as_secs()` directly to the millisecond count,
/// i.e. it treated whole seconds as milliseconds; the seconds part must be
/// scaled by 1000.
fn duration_to_millis(d: &Duration) -> u64 {
    d.as_secs() * 1000 + (d.subsec_nanos() / 1_000_000) as u64
}
// Appends `$name $value` to the command being built only when `$param` is
// `Some(value)`; a `None` leaves the command untouched.
macro_rules! option_arg {
($disque: expr, $name: expr, $param: expr) => (
match $param {
Some(u) => $disque.arg($name).arg(u),
None => &mut $disque,
};
)
}
/// A client handle for a Disque server; wraps a redis-rs `Connection`
/// speaking the Redis protocol.
pub struct Disque {
connection: Connection,
}
impl Disque {
/// Opens a new connection to a Disque server.
///
/// # Examples
/// ```
/// # use disque::Disque;
/// let disque = Disque::open("redis://127.0.0.1:7711/").unwrap();
/// ```
pub fn open<T: IntoConnectionInfo>(params: T) -> RedisResult<Disque> {
let client = try!(redis::Client::open(params));
let connection = try!(client.get_connection());
Ok(Disque { connection: connection })
}
/// The hello command returns information about the disque cluster.
///
/// # Examples
/// ```
/// # use disque::Disque;
/// let disque = Disque::open("redis://127.0.0.1:7711/").unwrap();
/// let (_, nodeid, _) = disque.hello().unwrap();
/// println!("Connected to node {}", nodeid);
/// ```
pub fn hello(&self) -> RedisResult<(u8, String, Vec<(String, String, u16, u32)>)> {
let mut items = match try!(cmd("HELLO").query(&self.connection)) {
Value::Bulk(items) => items,
_ => return Err(RedisError::from((ErrorKind::TypeError,
"Expected multi-bulk"))),
};
if items.len() != 3 {
return Err(RedisError::from((ErrorKind::TypeError,
"Expected multi-bulk with size 3")));
}
// The reply is [version, node id, nodes]; `pop()` walks it back to front.
let nodes = try!(Vec::from_redis_value(&items.pop().unwrap()));
let nodeid = try!(String::from_redis_value(&items.pop().unwrap()));
let hellov = try!(u8::from_redis_value(&items.pop().unwrap()));
Ok((hellov, nodeid, nodes))
}
/// Adds a job to a queue.
///
/// # Examples
/// ```
/// # use disque::Disque;
/// # use std::time::Duration;
/// let disque = Disque::open("redis://127.0.0.1:7711/").unwrap();
/// let jobid = disque.addjob(b"my queue", b"my job",
/// Duration::from_secs(10), None, None, None, None, None, false
/// ).unwrap();
/// println!("My job id is {}", jobid);
/// ```
pub fn addjob(&self, queue_name: &[u8], job: &[u8], timeout: Duration,
replicate: Option<usize>, delay: Option<Duration>,
retry: Option<Duration>, ttl: Option<Duration>,
maxlen: Option<usize>, async: bool,
) -> RedisResult<String> {
let mut c = cmd("ADDJOB");
// The timeout is sent in milliseconds; DELAY/RETRY/TTL in whole seconds.
c
.arg(queue_name)
.arg(job)
.arg(duration_to_millis(&timeout));
option_arg!(c, "REPLICATE", replicate);
option_arg!(c, "DELAY", delay.map(|x| x.as_secs()));
option_arg!(c, "RETRY", retry.map(|x| x.as_secs()));
option_arg!(c, "TTL", ttl.map(|x| x.as_secs()));
option_arg!(c, "MAXLEN", maxlen);
// `async` appends the ASYNC flag to the command.
if async { c.arg("ASYNC"); }
c.query(&self.connection)
}
/// Gets up to `count` jobs from certain `queues`.
///
/// # Examples
/// ```
/// # use disque::Disque;
/// # use std::time::Duration;
/// let disque = Disque::open("redis://127.0.0.1:7711/").unwrap();
/// let queue = b"my getjob_count queue";
/// disque.addjob(queue, b"my job 1", Duration::from_secs(10),
/// None, None, None, None, None, false
/// ).unwrap();
/// disque.addjob(queue, b"my job 2", Duration::from_secs(10),
/// None, None, None, None, None, false
/// ).unwrap();
///
/// let jobs = disque.getjob_count(true, None, 10, false, &[queue]).unwrap();
/// assert_eq!(jobs.len(), 2);
/// assert_eq!(jobs[0][2], b"my job 1");
/// assert_eq!(jobs[1][2], b"my job 2");
/// ```
pub fn getjob_count(&self, nohang: bool, timeout: Option<Duration>,
count: usize, withcounters: bool, queues: &[&[u8]]
) -> RedisResult<Vec<Vec<Vec<u8>>>> {
let mut c = cmd("GETJOB");
if nohang { c.arg("NOHANG"); }
option_arg!(c, "TIMEOUT", timeout.map(|t| duration_to_millis(&t)));
c.arg("COUNT").arg(count);
if withcounters { c.arg("WITHCOUNTERS"); }
c.arg("FROM");
for queue in queues { c.arg(*queue); }
c.query(&self.connection)
}
/// Gets a single job from any of the specified `queues`.
pub fn getjob(&self, nohang: bool, timeout: Option<Duration>,
withcounters: bool, queues: &[&[u8]]
) -> RedisResult<Option<Vec<Vec<u8>>>> {
// Delegates to getjob_count with count = 1; `None` when no job arrived.
let mut jobs = try!(self.getjob_count(nohang, timeout, 1, withcounters,
queues));
Ok(jobs.pop())
}
/// Acknowledge jobs.
pub fn ackjob(&self, jobids: &[&[u8]]) -> RedisResult<bool> {
let mut c = cmd("ACKJOB");
for jobid in jobids { c.arg(*jobid); }
c.query(&self.connection)
}
/// Fast acknowledge jobs.
pub fn fastack(&self, jobids: &[&[u8]]) -> RedisResult<usize> {
let mut c = cmd("FASTACK");
for jobid in jobids { c.arg(*jobid); }
c.query(&self.connection)
}
/// Tell Disque that a job is still processed.
pub fn working(&self, jobid: &[u8]) -> RedisResult<Duration> {
// The server replies with the job's retry period, in seconds.
let retry = try!(cmd("WORKING").arg(jobid).query(&self.connection));
Ok(Duration::from_secs(retry))
}
/// Tells Disque to put back the job in the queue ASAP. Should be used when
/// the worker was not able to process a message and wants the message to
/// be put back into the queue in order to be processed again.
pub fn nack(&self, jobids: &[&[u8]]) -> RedisResult<usize> {
let mut c = cmd("NACK");
for jobid in jobids { c.arg(*jobid); }
c.query(&self.connection)
}
/// Information about the server
pub fn info(&self) -> RedisResult<InfoDict> {
cmd("INFO").query(&self.connection)
}
/// Size of the queue
pub fn qlen(&self, queue_name: &[u8]) -> RedisResult<usize> {
cmd("QLEN").arg(queue_name).query(&self.connection)
}
/// Gets jobs from `queue_name` up to the absolute number of `count`.
/// If count is negative, it will be from newest to oldest.
pub fn qpeek(&self, queue_name: &[u8], count: i64
) -> RedisResult<Vec<Vec<Vec<u8>>>> {
cmd("QPEEK").arg(queue_name).arg(count).query(&self.connection)
}
/// Queue jobs
pub fn enqueue(&self, jobids: &[&[u8]]) -> RedisResult<usize> {
let mut c = cmd("ENQUEUE");
for jobid in jobids { c.arg(*jobid); }
c.query(&self.connection)
}
/// Remove jobs from queue
pub fn dequeue(&self, jobids: &[&[u8]]) -> RedisResult<usize> {
let mut c = cmd("DEQUEUE");
for jobid in jobids { c.arg(*jobid); }
c.query(&self.connection)
}
/// Completely delete a job from a single node.
pub fn deljob(&self, jobids: &[&[u8]]) -> RedisResult<usize> {
let mut c = cmd("DELJOB");
for jobid in jobids { c.arg(*jobid); }
c.query(&self.connection)
}
/// Returns full information about a job, like its current state and data.
pub fn show(&self, jobid: &[u8]) -> RedisResult<HashMap<String, Value>> {
let info:Value = try!(cmd("SHOW").arg(jobid).query(&self.connection));
let mut h = HashMap::new();
let mut items = match info {
Value::Bulk(items) => items,
_ => return Err(RedisError::from((ErrorKind::TypeError,
"Expected multi-bulk"))),
};
if items.len() % 2 != 0 {
return Err(RedisError::from((ErrorKind::TypeError,
"Expected an even number of elements")));
}
// The reply alternates field name / field value; pop pairs off the end.
while items.len() > 0 {
let value = items.pop().unwrap();
let key:String = try!(String::from_redis_value(&items.pop().unwrap()));
h.insert(key, value);
}
Ok(h)
}
/// Iterator to run all queues that fulfil a criteria.
/// The iterator will batch into segments of approximate `count` size.
pub fn qscan(&self, cursor: u64, count: u64, busyloop: bool,
minlen: Option<u64>, maxlen: Option<u64>, importrate: Option<u64>
) -> RedisResult<Iter<Vec<u8>>> {
let mut c = cmd("QSCAN");
c.arg("COUNT").arg(count);
if busyloop { c.arg("BUSYLOOP"); }
option_arg!(c, "MINLEN", minlen);
option_arg!(c, "MAXLEN", maxlen);
option_arg!(c, "IMPORTRATE", importrate);
c.cursor_arg(cursor).iter(&self.connection)
}
/// Iterator to run all jobs that fulfil a criteria.
/// The iterator will batch into segments of approximate `count` size.
pub fn jscan_id(&self, cursor: u64, count: u64, blocking: bool,
queue: Option<&[u8]>, states: &[&str]
) -> RedisResult<Iter<String>> {
let mut c = cmd("JSCAN");
c.arg("COUNT").arg(count);
if blocking { c.arg("BLOCKING"); }
option_arg!(c, "QUEUE", queue);
// Each requested state is passed as its own STATE argument.
for state in states {
c.arg("STATE").arg(*state);
}
c.cursor_arg(cursor).iter(&self.connection)
}
}
// Integration tests: every test below requires a live Disque server
// listening on 127.0.0.1:7711 (see `conn`).
#[cfg(test)]
fn conn() -> Disque {
Disque::open("redis://127.0.0.1:7711/").unwrap()
}
#[test]
fn can_connect() {
conn();
}
#[test]
fn hello() {
let disque = conn();
let (v, nodeid, nodes) = disque.hello().unwrap();
assert_eq!(v, 1);
// The node we are connected to must appear in the cluster node list.
assert!(nodes.into_iter().map(|n| n.0).collect::<Vec<_>>().contains(&nodeid));
}
#[test]
fn addjob() {
let disque = conn();
let jobid = disque.addjob(b"queue", b"job", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
// Disque job ids are 48 characters framed by "DI"..."SQ".
assert_eq!(jobid.len(), 48);
assert_eq!(&jobid[..2], "DI");
assert_eq!(&jobid[46..], "SQ");
}
#[test]
fn getjob_count() {
let disque = conn();
let j1 = disque.addjob(b"queue1", b"job1", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
let j2 = disque.addjob(b"queue2", b"job2", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
disque.addjob(b"queue3", b"job3", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
// queue3 was not requested, so only two jobs should come back.
let jobs = disque.getjob_count(false, None, 3, true, &[b"queue1", b"queue2"]).unwrap();
assert_eq!(jobs.len(), 2);
// Each job is [queue, id, body].
assert_eq!(jobs[0][0], b"queue1");
assert_eq!(jobs[0][1], j1.into_bytes());
assert_eq!(jobs[0][2], b"job1");
assert_eq!(jobs[1][0], b"queue2");
assert_eq!(jobs[1][1], j2.into_bytes());
assert_eq!(jobs[1][2], b"job2");
}
#[test]
fn getjob() {
let disque = conn();
let jobid = disque.addjob(b"queue4", b"job4", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
let job = disque.getjob(false, None, true, &[b"queue4", b"queue5"]).unwrap().unwrap();
assert_eq!(job[0], b"queue4");
assert_eq!(job[1], jobid.into_bytes());
assert_eq!(job[2], b"job4");
}
#[test]
fn ackjob() {
let disque = conn();
let jobid = disque.addjob(b"queue6", b"job6", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
assert!(disque.ackjob(&[jobid.as_bytes()]).unwrap());
// FIXME: crashes disque-server, see https://github.com/antirez/disque/issues/113
// assert!(!disque.ackjob(&[jobid.as_bytes()]).unwrap());
// assert!(!disque.ackjob(&[jobid.as_bytes()]).unwrap());
}
#[test]
fn fastack() {
let disque = conn();
let jobid = disque.addjob(b"queue7", b"job7", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
// Only the first ack counts; repeats report zero acknowledged jobs.
assert!(disque.fastack(&[jobid.as_bytes()]).unwrap() == 1);
assert!(disque.fastack(&[jobid.as_bytes()]).unwrap() == 0);
assert!(disque.fastack(&[jobid.as_bytes()]).unwrap() == 0);
}
#[test]
fn working() {
let disque = conn();
let jobid = disque.addjob(b"queue8", b"job8", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
assert!(disque.working(jobid.as_bytes()).unwrap().as_secs() > 0);
}
#[test]
fn nack() {
let disque = conn();
let j1 = disque.addjob(b"queue9", b"job9.1", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
let j2 = disque.addjob(b"queue9", b"job9.2", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
let j3 = disque.addjob(b"queue9", b"job9.3", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
assert_eq!(disque.getjob_count(false, None, 100, true, &[b"queue9"]).unwrap().len(), 3);
// NACK puts the fetched jobs back, so a second fetch sees all three again.
assert_eq!(disque.nack(&[j1.as_bytes(), j2.as_bytes(), j3.as_bytes()]).unwrap(), 3);
assert_eq!(disque.getjob_count(false, None, 100, true, &[b"queue9"]).unwrap().len(), 3);
}
#[test]
fn info() {
let disque = conn();
let info = disque.info().unwrap();
let _:String = info.get("disque_version").unwrap();
}
#[test]
fn qlen() {
let disque = conn();
disque.addjob(b"queue10", b"job10", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
disque.addjob(b"queue10", b"job10", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
disque.addjob(b"queue10", b"job10", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
assert_eq!(disque.qlen(b"queue10").unwrap(), 3);
assert_eq!(disque.getjob_count(false, None, 100, true, &[b"queue10"]).unwrap().len(), 3);
}
#[test]
fn qpeek() {
let disque = conn();
let j1 = disque.addjob(b"queue11", b"job11.1", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
let j2 = disque.addjob(b"queue11", b"job11.2", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
assert_eq!(disque.qpeek(b"queue11", 10).unwrap(), vec![
vec![
b"queue11".to_vec(),
j1.as_bytes().to_vec(),
b"job11.1".to_vec(),
],
vec![
b"queue11".to_vec(),
j2.as_bytes().to_vec(),
b"job11.2".to_vec(),
],
]);
assert_eq!(disque.qpeek(b"queue11", -10).unwrap(), vec![
vec![
b"queue11".to_vec(),
j2.as_bytes().to_vec(),
b"job11.2".to_vec(),
],
vec![
b"queue11".to_vec(),
j1.as_bytes().to_vec(),
b"job11.1".to_vec(),
],
]);
assert_eq!(disque.getjob_count(false, None, 100, true, &[b"queue11"]).unwrap().len(), 2);
}
#[test]
fn enqueue() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let j1 = disque.addjob(b"queue12", b"job12.1", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    let j2 = disque.addjob(b"queue12", b"job12.2", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    let j3 = disque.addjob(b"queue12", b"job12.3", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    assert_eq!(disque.getjob_count(false, None, 100, true, &[b"queue12"]).unwrap().len(), 3);
    // ENQUEUE puts the fetched jobs back in the queue, so they can be fetched again.
    assert_eq!(disque.enqueue(&[j1.as_bytes(), j2.as_bytes(), j3.as_bytes()]).unwrap(), 3);
    assert_eq!(disque.getjob_count(false, None, 100, true, &[b"queue12"]).unwrap().len(), 3);
}
#[test]
fn dequeue() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let j1 = disque.addjob(b"queue13", b"job13.1", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    let j2 = disque.addjob(b"queue13", b"job13.2", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    let j3 = disque.addjob(b"queue13", b"job13.3", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    // After DEQUEUE, a non-blocking GETJOB must come back empty.
    assert_eq!(disque.dequeue(&[j1.as_bytes(), j2.as_bytes(), j3.as_bytes()]).unwrap(), 3);
    assert_eq!(disque.getjob_count(true, None, 100, true, &[b"queue13"]).unwrap().len(), 0);
}
#[test]
fn deljob() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let j1 = disque.addjob(b"queue14", b"job14.1", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    let j2 = disque.addjob(b"queue14", b"job14.2", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    disque.addjob(b"queue14", b"job14.3", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    // Two of the three jobs are deleted; only the third remains fetchable.
    assert_eq!(disque.deljob(&[j1.as_bytes(), j2.as_bytes()]).unwrap(), 2);
    assert_eq!(disque.getjob_count(true, None, 100, true, &[b"queue14"]).unwrap().len(), 1);
}
#[test]
fn show() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let jobid = disque.addjob(b"queue15", b"job15", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    let info = disque.show(jobid.as_bytes()).unwrap();
    // SHOW must report id, queue, and that the job is still queued.
    assert_eq!(info.get("id").unwrap(), &Value::Data(jobid.as_bytes().to_vec()));
    assert_eq!(info.get("queue").unwrap(), &Value::Data(b"queue15".to_vec()));
    assert_eq!(info.get("state").unwrap(), &Value::Data(b"queued".to_vec()));
}
#[test]
fn qscan() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    disque.addjob(b"queue16", b"job16", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    // A full scan must include the queue we just touched.
    let queues = disque.qscan(0, 1000, false, None, None, None).unwrap().collect::<Vec<_>>();
    assert!(queues.contains(&b"queue16".to_vec()));
}
#[test]
fn jscan_id() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let job = disque.addjob(b"queue17", b"job17", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    // Unfiltered scan sees the job; a filter on a different queue must not.
    assert!(disque.jscan_id(0, 1000, false, None, &[]).unwrap().collect::<Vec<_>>().contains(&job));
    assert!(!disque.jscan_id(0, 1000, false, Some(b"queue16"), &[]).unwrap().collect::<Vec<_>>().contains(&job));
    assert!(disque.jscan_id(0, 1000, false, Some(b"queue17"), &[]).unwrap().collect::<Vec<_>>().contains(&job));
}
getjob returns a tuple and does not accept withscores
//! disque-rs is a rust implementation of a Disque client library.
//! It uses redis-rs to handle the connection and low level protocol.
//!
//! The crate is called `disque` and you can depend on it via cargo:
//!
//! ```ini
//! [dependencies.disque]
//! version = "*"
//! ```
//!
//! ## Connection Parameters
//!
//! disque-rs knows different ways to define where a connection should
//! go. The parameter to `Disque::open` needs to implement the
//! `IntoConnectionInfo` trait of which there are three implementations:
//!
//! * string slices in `redis://` URL format.
//! * URL objects from the redis-url crate.
//! * `ConnectionInfo` objects.
//!
//! The URL format is `redis://[:<passwd>@]<hostname>[:port][/<db>]`
//!
//! Notice the scheme is actually "redis" because it uses the Redis protocol.
//! By default, it will use port 6379, although Disque uses 7711.
//!
//! ## Unix Sockets
//!
//! For unix socket support, install `redis` with the feature "unix_socket".
//!
//! ## Command reference
//!
//! The commands are a direct implementation of Disque commands. To read a
//! reference about their meaning, go to https://github.com/antirez/disque
#![crate_name = "disque"]
#![crate_type = "lib"]
extern crate redis;
use std::collections::HashMap;
use std::time::Duration;
use redis::{Connection, RedisError, cmd, Value, ErrorKind, FromRedisValue,
IntoConnectionInfo, Iter, RedisResult, InfoDict};
/// Converts a `Duration` into whole milliseconds, as required by Disque
/// commands taking millisecond arguments (ADDJOB/GETJOB `TIMEOUT`).
///
/// Sub-millisecond remainders are truncated. Saturating arithmetic avoids a
/// debug-build overflow panic on absurdly large durations.
fn duration_to_millis(d: &Duration) -> u64 {
    // Bug fix: the previous version added raw *seconds* to the millisecond
    // remainder instead of scaling by 1000, so e.g. a 10 s timeout was sent
    // to the server as 10 ms.
    d.as_secs()
        .saturating_mul(1000)
        .saturating_add((d.subsec_nanos() / 1_000_000) as u64)
}
/// Helper macro for building commands with optional parameters: appends
/// `$name $value` to the command when `$param` is `Some`, and leaves the
/// command untouched when it is `None`.
macro_rules! option_arg {
    ($disque: expr, $name: expr, $param: expr) => (
        match $param {
            Some(u) => $disque.arg($name).arg(u),
            // Both match arms must produce the same type (`arg` returns a
            // mutable borrow of the command), hence the `&mut` here.
            None => &mut $disque,
        };
    )
}
/// A client handle to a single Disque server. Disque speaks the Redis wire
/// protocol, so the connection is managed by redis-rs.
pub struct Disque {
    // Underlying redis-rs connection used to issue every command.
    connection: Connection,
}
impl Disque {
    /// Opens a new connection to a Disque server.
    ///
    /// # Examples
    /// ```
    /// # use disque::Disque;
    /// let disque = Disque::open("redis://127.0.0.1:7711/").unwrap();
    /// ```
    pub fn open<T: IntoConnectionInfo>(params: T) -> RedisResult<Disque> {
        let client = try!(redis::Client::open(params));
        let connection = try!(client.get_connection());
        Ok(Disque { connection: connection })
    }
    /// The hello command returns information about the disque cluster.
    ///
    /// Returns `(protocol version, this node's id, known nodes)` where each
    /// node entry is `(id, host, port, priority)` as decoded by redis-rs.
    ///
    /// # Examples
    /// ```
    /// # use disque::Disque;
    /// let disque = Disque::open("redis://127.0.0.1:7711/").unwrap();
    /// let (_, nodeid, _) = disque.hello().unwrap();
    /// println!("Connected to node {}", nodeid);
    /// ```
    pub fn hello(&self) -> RedisResult<(u8, String, Vec<(String, String, u16, u32)>)> {
        let mut items = match try!(cmd("HELLO").query(&self.connection)) {
            Value::Bulk(items) => items,
            _ => return Err(RedisError::from((ErrorKind::TypeError,
                "Expected multi-bulk"))),
        };
        if items.len() != 3 {
            return Err(RedisError::from((ErrorKind::TypeError,
                "Expected multi-bulk with size 3")));
        }
        // `pop` takes elements from the tail, so the reply is consumed in
        // reverse order: node list, then node id, then protocol version.
        let nodes = try!(Vec::from_redis_value(&items.pop().unwrap()));
        let nodeid = try!(String::from_redis_value(&items.pop().unwrap()));
        let hellov = try!(u8::from_redis_value(&items.pop().unwrap()));
        Ok((hellov, nodeid, nodes))
    }
    /// Adds a job to a queue.
    ///
    /// `timeout` is sent in milliseconds (via `duration_to_millis`); `delay`,
    /// `retry` and `ttl` are sent in whole seconds. Returns the new job id.
    ///
    /// # Examples
    /// ```
    /// # use disque::Disque;
    /// # use std::time::Duration;
    /// let disque = Disque::open("redis://127.0.0.1:7711/").unwrap();
    /// let jobid = disque.addjob(b"my queue", b"my job",
    ///     Duration::from_secs(10), None, None, None, None, None, false
    /// ).unwrap();
    /// println!("My job id is {}", jobid);
    /// ```
    pub fn addjob(&self, queue_name: &[u8], job: &[u8], timeout: Duration,
                  replicate: Option<usize>, delay: Option<Duration>,
                  retry: Option<Duration>, ttl: Option<Duration>,
                  maxlen: Option<usize>, async: bool,
                  ) -> RedisResult<String> {
        let mut c = cmd("ADDJOB");
        c
            .arg(queue_name)
            .arg(job)
            .arg(duration_to_millis(&timeout));
        // Optional modifiers are only appended when provided.
        option_arg!(c, "REPLICATE", replicate);
        option_arg!(c, "DELAY", delay.map(|x| x.as_secs()));
        option_arg!(c, "RETRY", retry.map(|x| x.as_secs()));
        option_arg!(c, "TTL", ttl.map(|x| x.as_secs()));
        option_arg!(c, "MAXLEN", maxlen);
        if async { c.arg("ASYNC"); }
        c.query(&self.connection)
    }
    /// Gets up to `count` jobs from certain `queues`.
    ///
    /// Each returned tuple is `(queue name, job id, job body)`. Unless
    /// `nohang` is set, the server may block until jobs are available or
    /// `timeout` (milliseconds) elapses.
    ///
    /// # Examples
    /// ```
    /// # use disque::Disque;
    /// # use std::time::Duration;
    /// let disque = Disque::open("redis://127.0.0.1:7711/").unwrap();
    /// let queue = b"my getjob_count queue";
    /// disque.addjob(queue, b"my job 1", Duration::from_secs(10),
    ///     None, None, None, None, None, false
    /// ).unwrap();
    /// disque.addjob(queue, b"my job 2", Duration::from_secs(10),
    ///     None, None, None, None, None, false
    /// ).unwrap();
    ///
    /// let jobs = disque.getjob_count(true, None, 10, &[queue]).unwrap();
    /// assert_eq!(jobs.len(), 2);
    /// assert_eq!(jobs[0].2, b"my job 1");
    /// assert_eq!(jobs[1].2, b"my job 2");
    /// ```
    pub fn getjob_count(&self, nohang: bool, timeout: Option<Duration>,
                        count: usize, queues: &[&[u8]]
                        ) -> RedisResult<Vec<(Vec<u8>, String, Vec<u8>)>> {
        let mut c = cmd("GETJOB");
        if nohang { c.arg("NOHANG"); }
        option_arg!(c, "TIMEOUT", timeout.map(|t| duration_to_millis(&t)));
        c.arg("COUNT").arg(count);
        c.arg("FROM");
        for queue in queues { c.arg(*queue); }
        // Each reply element is a three-item array: queue, job id, job body.
        let v:Vec<Vec<Vec<u8>>> = try!(c.query(&self.connection));
        let mut r = vec![];
        for mut x in v.into_iter() {
            if x.len() != 3 {
                return Err(RedisError::from((ErrorKind::TypeError,
                    "Expected exactly three elements")));
            }
            // Popped from the tail, so reverse order: body, id, queue.
            let job = x.pop().unwrap();
            let jobid = x.pop().unwrap();
            let queue = x.pop().unwrap();
            // Job ids are ASCII; reject anything that is not valid UTF-8.
            r.push((queue, match String::from_utf8(jobid) {
                Ok(v) => v,
                Err(_) => return Err(RedisError::from((ErrorKind::TypeError,
                    "Expected utf8 job id"))),
            }, job));
        }
        Ok(r)
    }
    /// Gets a single job from any of the specified `queues`.
    pub fn getjob(&self, nohang: bool, timeout: Option<Duration>,
                  queues: &[&[u8]]
                  ) -> RedisResult<Option<(Vec<u8>, String, Vec<u8>)>> {
        // Delegates to `getjob_count` with count 1; `None` when no job came back.
        let mut jobs = try!(self.getjob_count(nohang, timeout, 1, queues));
        Ok(jobs.pop())
    }
    /// Acknowledge jobs.
    pub fn ackjob(&self, jobids: &[&[u8]]) -> RedisResult<bool> {
        let mut c = cmd("ACKJOB");
        for jobid in jobids { c.arg(*jobid); }
        c.query(&self.connection)
    }
    /// Fast acknowledge jobs. Returns the number of jobs acknowledged.
    pub fn fastack(&self, jobids: &[&[u8]]) -> RedisResult<usize> {
        let mut c = cmd("FASTACK");
        for jobid in jobids { c.arg(*jobid); }
        c.query(&self.connection)
    }
    /// Tell Disque that a job is still processed. Returns the job's retry
    /// period, i.e. how long until the server requeues it.
    pub fn working(&self, jobid: &[u8]) -> RedisResult<Duration> {
        // WORKING replies with the retry value in seconds.
        let retry = try!(cmd("WORKING").arg(jobid).query(&self.connection));
        Ok(Duration::from_secs(retry))
    }
    /// Tells Disque to put back the job in the queue ASAP. Should be used when
    /// the worker was not able to process a message and wants the message to
    /// be put back into the queue in order to be processed again.
    pub fn nack(&self, jobids: &[&[u8]]) -> RedisResult<usize> {
        let mut c = cmd("NACK");
        for jobid in jobids { c.arg(*jobid); }
        c.query(&self.connection)
    }
    /// Information about the server
    pub fn info(&self) -> RedisResult<InfoDict> {
        cmd("INFO").query(&self.connection)
    }
    /// Size of the queue
    pub fn qlen(&self, queue_name: &[u8]) -> RedisResult<usize> {
        cmd("QLEN").arg(queue_name).query(&self.connection)
    }
    /// Gets jobs from `queue_name` up to the absolute number of `count`.
    /// If count is negative, it will be from newest to oldest.
    pub fn qpeek(&self, queue_name: &[u8], count: i64
                 ) -> RedisResult<Vec<Vec<Vec<u8>>>> {
        cmd("QPEEK").arg(queue_name).arg(count).query(&self.connection)
    }
    /// Queue jobs. Returns the number of jobs that were actually enqueued.
    pub fn enqueue(&self, jobids: &[&[u8]]) -> RedisResult<usize> {
        let mut c = cmd("ENQUEUE");
        for jobid in jobids { c.arg(*jobid); }
        c.query(&self.connection)
    }
    /// Remove jobs from queue. Returns the number of jobs dequeued.
    pub fn dequeue(&self, jobids: &[&[u8]]) -> RedisResult<usize> {
        let mut c = cmd("DEQUEUE");
        for jobid in jobids { c.arg(*jobid); }
        c.query(&self.connection)
    }
    /// Completely delete a job from a single node. Returns the number of
    /// jobs deleted.
    pub fn deljob(&self, jobids: &[&[u8]]) -> RedisResult<usize> {
        let mut c = cmd("DELJOB");
        for jobid in jobids { c.arg(*jobid); }
        c.query(&self.connection)
    }
    /// Returns full information about a job, like its current state and data.
    pub fn show(&self, jobid: &[u8]) -> RedisResult<HashMap<String, Value>> {
        let info:Value = try!(cmd("SHOW").arg(jobid).query(&self.connection));
        let mut h = HashMap::new();
        // The reply is a flat array of alternating key/value pairs.
        let mut items = match info {
            Value::Bulk(items) => items,
            _ => return Err(RedisError::from((ErrorKind::TypeError,
                "Expected multi-bulk"))),
        };
        if items.len() % 2 != 0 {
            return Err(RedisError::from((ErrorKind::TypeError,
                "Expected an even number of elements")));
        }
        // Popping from the tail yields the value first, then its key.
        while items.len() > 0 {
            let value = items.pop().unwrap();
            let key:String = try!(String::from_redis_value(&items.pop().unwrap()));
            h.insert(key, value);
        }
        Ok(h)
    }
    /// Iterator to run all queues that fulfil a criteria.
    /// The iterator will batch into segments of approximate `count` size.
    pub fn qscan(&self, cursor: u64, count: u64, busyloop: bool,
                 minlen: Option<u64>, maxlen: Option<u64>, importrate: Option<u64>
                 ) -> RedisResult<Iter<Vec<u8>>> {
        let mut c = cmd("QSCAN");
        c.arg("COUNT").arg(count);
        if busyloop { c.arg("BUSYLOOP"); }
        option_arg!(c, "MINLEN", minlen);
        option_arg!(c, "MAXLEN", maxlen);
        option_arg!(c, "IMPORTRATE", importrate);
        c.cursor_arg(cursor).iter(&self.connection)
    }
    /// Iterator to run all jobs that fulfil a criteria.
    /// The iterator will batch into segments of approximate `count` size.
    pub fn jscan_id(&self, cursor: u64, count: u64, blocking: bool,
                    queue: Option<&[u8]>, states: &[&str]
                    ) -> RedisResult<Iter<String>> {
        let mut c = cmd("JSCAN");
        c.arg("COUNT").arg(count);
        if blocking { c.arg("BLOCKING"); }
        option_arg!(c, "QUEUE", queue);
        // Multiple STATE filters may be supplied, one flag per state.
        for state in states {
            c.arg("STATE").arg(*state);
        }
        c.cursor_arg(cursor).iter(&self.connection)
    }
}
/// Test helper: connects to a Disque server expected at 127.0.0.1:7711
/// (panics if unreachable).
#[cfg(test)]
fn conn() -> Disque {
    Disque::open("redis://127.0.0.1:7711/").unwrap()
}
#[test]
fn can_connect() {
    // Smoke test: `conn` unwraps, so this panics if the server is down.
    conn();
}
#[test]
fn hello() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let (v, nodeid, nodes) = disque.hello().unwrap();
    // Protocol version 1; the connected node must appear in the node list.
    assert_eq!(v, 1);
    assert!(nodes.into_iter().map(|n| n.0).collect::<Vec<_>>().contains(&nodeid));
}
#[test]
fn addjob() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let id = disque.addjob(b"queue", b"job", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    // Disque job ids are 48 characters, bracketed by the "DI"/"SQ" markers.
    assert_eq!(id.len(), 48);
    assert!(id.starts_with("DI"));
    assert!(id.ends_with("SQ"));
}
#[test]
fn getjob_count() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let j1 = disque.addjob(b"queue1", b"job1", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    let j2 = disque.addjob(b"queue2", b"job2", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    disque.addjob(b"queue3", b"job3", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    // Only queue1/queue2 are requested, so the job in queue3 is not returned.
    let jobs = disque.getjob_count(false, None, 3, &[b"queue1", b"queue2"]).unwrap();
    assert_eq!(jobs.len(), 2);
    // Tuples are (queue, job id, body).
    assert_eq!(jobs[0].0, b"queue1");
    assert_eq!(jobs[0].1, j1);
    assert_eq!(jobs[0].2, b"job1");
    assert_eq!(jobs[1].0, b"queue2");
    assert_eq!(jobs[1].1, j2);
    assert_eq!(jobs[1].2, b"job2");
}
#[test]
fn getjob() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let jobid = disque.addjob(b"queue4", b"job4", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    // queue5 is empty; the single job must come from queue4.
    let job = disque.getjob(false, None, &[b"queue4", b"queue5"]).unwrap().unwrap();
    assert_eq!(job.0, b"queue4");
    assert_eq!(job.1, jobid);
    assert_eq!(job.2, b"job4");
}
#[test]
fn ackjob() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let jobid = disque.addjob(b"queue6", b"job6", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    assert!(disque.ackjob(&[jobid.as_bytes()]).unwrap());
    // FIXME: crashes disque-server, see https://github.com/antirez/disque/issues/113
    // assert!(!disque.ackjob(&[jobid.as_bytes()]).unwrap());
    // assert!(!disque.ackjob(&[jobid.as_bytes()]).unwrap());
}
#[test]
fn fastack() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let id = disque.addjob(b"queue7", b"job7", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    // The first FASTACK removes the job; repeated acks are no-ops.
    assert_eq!(disque.fastack(&[id.as_bytes()]).unwrap(), 1);
    for _ in 0..2 {
        assert_eq!(disque.fastack(&[id.as_bytes()]).unwrap(), 0);
    }
}
#[test]
fn working() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let jobid = disque.addjob(b"queue8", b"job8", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    // WORKING returns the remaining retry period, which must be positive.
    assert!(disque.working(jobid.as_bytes()).unwrap().as_secs() > 0);
}
#[test]
fn nack() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let j1 = disque.addjob(b"queue9", b"job9.1", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    let j2 = disque.addjob(b"queue9", b"job9.2", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    let j3 = disque.addjob(b"queue9", b"job9.3", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    assert_eq!(disque.getjob_count(false, None, 100, &[b"queue9"]).unwrap().len(), 3);
    // NACK requeues the fetched jobs, so all three are fetchable again.
    assert_eq!(disque.nack(&[j1.as_bytes(), j2.as_bytes(), j3.as_bytes()]).unwrap(), 3);
    assert_eq!(disque.getjob_count(false, None, 100, &[b"queue9"]).unwrap().len(), 3);
}
#[test]
fn info() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let info = disque.info().unwrap();
    // The INFO reply must expose the server version under `disque_version`.
    let _:String = info.get("disque_version").unwrap();
}
#[test]
fn qlen() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    disque.addjob(b"queue10", b"job10", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    disque.addjob(b"queue10", b"job10", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    disque.addjob(b"queue10", b"job10", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    assert_eq!(disque.qlen(b"queue10").unwrap(), 3);
    // Fetching should return all three queued jobs.
    assert_eq!(disque.getjob_count(false, None, 100, &[b"queue10"]).unwrap().len(), 3);
}
#[test]
fn qpeek() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let j1 = disque.addjob(b"queue11", b"job11.1", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    let j2 = disque.addjob(b"queue11", b"job11.2", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    // Positive count: oldest-to-newest order.
    assert_eq!(disque.qpeek(b"queue11", 10).unwrap(), vec![
        vec![
            b"queue11".to_vec(),
            j1.as_bytes().to_vec(),
            b"job11.1".to_vec(),
        ],
        vec![
            b"queue11".to_vec(),
            j2.as_bytes().to_vec(),
            b"job11.2".to_vec(),
        ],
    ]);
    // Negative count: newest-to-oldest order.
    assert_eq!(disque.qpeek(b"queue11", -10).unwrap(), vec![
        vec![
            b"queue11".to_vec(),
            j2.as_bytes().to_vec(),
            b"job11.2".to_vec(),
        ],
        vec![
            b"queue11".to_vec(),
            j1.as_bytes().to_vec(),
            b"job11.1".to_vec(),
        ],
    ]);
    // QPEEK does not consume jobs, so both are still fetchable.
    assert_eq!(disque.getjob_count(false, None, 100, &[b"queue11"]).unwrap().len(), 2);
}
#[test]
fn enqueue() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let j1 = disque.addjob(b"queue12", b"job12.1", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    let j2 = disque.addjob(b"queue12", b"job12.2", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    let j3 = disque.addjob(b"queue12", b"job12.3", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    assert_eq!(disque.getjob_count(false, None, 100, &[b"queue12"]).unwrap().len(), 3);
    // ENQUEUE puts the fetched jobs back in the queue, so they can be fetched again.
    assert_eq!(disque.enqueue(&[j1.as_bytes(), j2.as_bytes(), j3.as_bytes()]).unwrap(), 3);
    assert_eq!(disque.getjob_count(false, None, 100, &[b"queue12"]).unwrap().len(), 3);
}
#[test]
fn dequeue() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let j1 = disque.addjob(b"queue13", b"job13.1", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    let j2 = disque.addjob(b"queue13", b"job13.2", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    let j3 = disque.addjob(b"queue13", b"job13.3", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    // After DEQUEUE, a non-blocking GETJOB must come back empty.
    assert_eq!(disque.dequeue(&[j1.as_bytes(), j2.as_bytes(), j3.as_bytes()]).unwrap(), 3);
    assert_eq!(disque.getjob_count(true, None, 100, &[b"queue13"]).unwrap().len(), 0);
}
#[test]
fn deljob() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let j1 = disque.addjob(b"queue14", b"job14.1", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    let j2 = disque.addjob(b"queue14", b"job14.2", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    disque.addjob(b"queue14", b"job14.3", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    // Two of the three jobs are deleted; only the third remains fetchable.
    assert_eq!(disque.deljob(&[j1.as_bytes(), j2.as_bytes()]).unwrap(), 2);
    assert_eq!(disque.getjob_count(true, None, 100, &[b"queue14"]).unwrap().len(), 1);
}
#[test]
fn show() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let jobid = disque.addjob(b"queue15", b"job15", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    let info = disque.show(jobid.as_bytes()).unwrap();
    // SHOW must report id, queue, and that the job is still queued.
    assert_eq!(info.get("id").unwrap(), &Value::Data(jobid.as_bytes().to_vec()));
    assert_eq!(info.get("queue").unwrap(), &Value::Data(b"queue15".to_vec()));
    assert_eq!(info.get("state").unwrap(), &Value::Data(b"queued".to_vec()));
}
#[test]
fn qscan() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    disque.addjob(b"queue16", b"job16", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    // A full scan must include the queue we just touched.
    let queues = disque.qscan(0, 1000, false, None, None, None).unwrap().collect::<Vec<_>>();
    assert!(queues.contains(&b"queue16".to_vec()));
}
#[test]
fn jscan_id() {
    // Integration test: requires a Disque server at 127.0.0.1:7711 (see `conn`).
    let disque = conn();
    let job = disque.addjob(b"queue17", b"job17", Duration::from_secs(10), None, None, None, None, None, false).unwrap();
    // Unfiltered scan sees the job; a filter on a different queue must not.
    assert!(disque.jscan_id(0, 1000, false, None, &[]).unwrap().collect::<Vec<_>>().contains(&job));
    assert!(!disque.jscan_id(0, 1000, false, Some(b"queue16"), &[]).unwrap().collect::<Vec<_>>().contains(&job));
    assert!(disque.jscan_id(0, 1000, false, Some(b"queue17"), &[]).unwrap().collect::<Vec<_>>().contains(&job));
}
|
//! This crate implements the backend server for https://crates.io/
//!
//! All implemented routes are defined in the [middleware](fn.middleware.html) function and
//! implemented in the [category](category/index.html), [keyword](keyword/index.html),
//! [krate](krate/index.html), [user](user/index.html) and [version](version/index.html) modules.
#![deny(warnings)]
#![deny(missing_debug_implementations, missing_copy_implementations)]
#![cfg_attr(feature = "clippy", feature(plugin))]
#![cfg_attr(feature = "clippy", plugin(clippy))]
#![recursion_limit = "128"]
extern crate ammonia;
extern crate chrono;
extern crate comrak;
extern crate curl;
#[macro_use]
extern crate diesel;
#[macro_use]
extern crate diesel_codegen;
extern crate diesel_full_text_search;
extern crate dotenv;
extern crate flate2;
extern crate git2;
extern crate hex;
extern crate lettre;
extern crate license_exprs;
#[macro_use]
extern crate log;
extern crate oauth2;
extern crate openssl;
extern crate r2d2;
extern crate r2d2_diesel;
extern crate rand;
extern crate s3;
extern crate semver;
extern crate serde;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate serde_json;
extern crate tar;
extern crate time;
extern crate toml;
extern crate url;
extern crate conduit;
extern crate conduit_conditional_get;
extern crate conduit_cookie;
extern crate conduit_git_http_backend;
extern crate conduit_log_requests;
extern crate conduit_middleware;
extern crate conduit_router;
extern crate conduit_static;
extern crate cookie;
pub use app::App;
pub use self::badge::Badge;
pub use self::category::Category;
pub use config::Config;
pub use self::dependency::Dependency;
pub use self::download::VersionDownload;
pub use self::keyword::Keyword;
pub use self::krate::Crate;
pub use self::user::User;
pub use self::version::Version;
pub use self::uploaders::{Bomb, Uploader};
use std::sync::Arc;
use std::error::Error;
use conduit_router::RouteBuilder;
use conduit_middleware::MiddlewareBuilder;
use util::{R404, C, R};
pub mod app;
pub mod badge;
pub mod categories;
pub mod category;
pub mod config;
pub mod crate_owner_invitation;
pub mod db;
pub mod dependency;
pub mod dist;
pub mod download;
pub mod git;
pub mod http;
pub mod keyword;
pub mod krate;
pub mod owner;
pub mod render;
pub mod schema;
pub mod token;
pub mod upload;
pub mod uploaders;
pub mod user;
pub mod util;
pub mod version;
pub mod email;
mod local_upload;
mod pagination;
/// Used for setting different values depending on whether the app is being run in production,
/// in development, or for testing.
///
/// The app's `config.env` value is set in *src/bin/server.rs* to `Production` if the environment
/// variable `HEROKU` is set and `Development` otherwise. `config.env` is set to `Test`
/// unconditionally in *src/test/all.rs*.
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
pub enum Env {
    /// Local development; selected when `HEROKU` is unset.
    Development,
    /// Set unconditionally by the test suite.
    Test,
    /// Selected when the `HEROKU` environment variable is set.
    Production,
}
/// Used for setting different values depending on the type of registry this instance is.
///
/// `Primary` indicates this instance is a primary registry that is the source of truth for these
/// crates' information. `ReadOnlyMirror` indicates this instance is a read-only mirror of crate
/// information that exists on another instance.
///
/// The app's `config.mirror` value is set in *src/bin/server.rs* to `ReadOnlyMirror` if the
/// `MIRROR` environment variable is set and to `Primary` otherwise.
///
/// There may be more ways to run crates.io servers in the future, such as a
/// mirror that also has private crates that crates.io does not have.
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
pub enum Replica {
    /// Source of truth for crate data (crates.io itself).
    Primary,
    /// Read-only mirror of another instance; selected via `MIRROR`.
    ReadOnlyMirror,
}
/// Configures routes, sessions, logging, and other middleware.
///
/// Called from *src/bin/server.rs*. Route registration order and the order in
/// which middleware is added/wrapped below are both significant.
pub fn middleware(app: Arc<App>) -> MiddlewareBuilder {
    let mut api_router = RouteBuilder::new();
    // Crate metadata, publishing, and per-version endpoints.
    api_router.get("/crates", C(krate::index));
    api_router.get("/crates/:crate_id", C(krate::show));
    api_router.put("/crates/new", C(krate::new));
    api_router.get("/crates/:crate_id/:version", C(version::show));
    api_router.get("/crates/:crate_id/:version/download", C(krate::download));
    api_router.get("/crates/:crate_id/:version/readme", C(krate::readme));
    api_router.get(
        "/crates/:crate_id/:version/dependencies",
        C(version::dependencies),
    );
    api_router.get(
        "/crates/:crate_id/:version/downloads",
        C(version::downloads),
    );
    api_router.get("/crates/:crate_id/:version/authors", C(version::authors));
    // Used to generate download graphs
    api_router.get("/crates/:crate_id/downloads", C(krate::downloads));
    api_router.get("/crates/:crate_id/versions", C(krate::versions));
    api_router.put("/crates/:crate_id/follow", C(krate::follow));
    api_router.delete("/crates/:crate_id/follow", C(krate::unfollow));
    api_router.get("/crates/:crate_id/following", C(krate::following));
    // This endpoint may now be redundant, check frontend to see if it is
    // being used
    api_router.get("/crates/:crate_id/owners", C(krate::owners));
    api_router.get("/crates/:crate_id/owner_team", C(krate::owner_team));
    api_router.get("/crates/:crate_id/owner_user", C(krate::owner_user));
    api_router.put("/crates/:crate_id/owners", C(krate::add_owners));
    api_router.delete("/crates/:crate_id/owners", C(krate::remove_owners));
    api_router.delete("/crates/:crate_id/:version/yank", C(version::yank));
    api_router.put("/crates/:crate_id/:version/unyank", C(version::unyank));
    api_router.get(
        "/crates/:crate_id/reverse_dependencies",
        C(krate::reverse_dependencies),
    );
    // Versions, keywords, and categories.
    api_router.get("/versions", C(version::index));
    api_router.get("/versions/:version_id", C(version::show));
    api_router.get("/keywords", C(keyword::index));
    api_router.get("/keywords/:keyword_id", C(keyword::show));
    api_router.get("/categories", C(category::index));
    api_router.get("/categories/:category_id", C(category::show));
    api_router.get("/category_slugs", C(category::slugs));
    // Users, teams, and the authenticated user's own data.
    api_router.get("/users/:user_id", C(user::show));
    api_router.put("/users/:user_id", C(user::update_user));
    api_router.get("/users/:user_id/stats", C(user::stats));
    api_router.get("/teams/:team_id", C(user::show_team));
    api_router.get("/me", C(user::me));
    api_router.get("/me/updates", C(user::updates));
    api_router.get("/me/tokens", C(token::list));
    api_router.post("/me/tokens", C(token::new));
    api_router.delete("/me/tokens/:id", C(token::revoke));
    api_router.get(
        "/me/crate_owner_invitations",
        C(crate_owner_invitation::list),
    );
    api_router.put(
        "/me/crate_owner_invitations/:crate_id",
        C(crate_owner_invitation::handle_invite),
    );
    api_router.get("/summary", C(krate::summary));
    api_router.put("/confirm/:email_token", C(user::confirm_user_email));
    api_router.put("/users/:user_id/resend", C(user::regenerate_token_and_send));
    // Wrap the API router so unknown /api/v1 paths yield a 404.
    let api_router = Arc::new(R404(api_router));
    let mut router = RouteBuilder::new();
    // Mount the router under the /api/v1 path so we're at least somewhat at the
    // liberty to change things in the future!
    router.get("/api/v1/*path", R(api_router.clone()));
    router.put("/api/v1/*path", R(api_router.clone()));
    router.post("/api/v1/*path", R(api_router.clone()));
    router.head("/api/v1/*path", R(api_router.clone()));
    router.delete("/api/v1/*path", R(api_router));
    // OAuth and session endpoints live outside /api/v1.
    router.get("/authorize_url", C(user::github_authorize));
    router.get("/authorize", C(user::github_access_token));
    router.delete("/logout", C(user::logout));
    // Only serve the local checkout of the git index in development mode.
    // In production, for crates.io, cargo gets the index from
    // https://github.com/rust-lang/crates.io-index directly.
    let env = app.config.env;
    if env == Env::Development {
        let s = conduit_git_http_backend::Serve(app.git_repo_checkout.clone());
        let s = Arc::new(s);
        router.get("/git/index/*path", R(s.clone()));
        router.post("/git/index/*path", R(s));
    }
    let mut m = MiddlewareBuilder::new(R404(router));
    if env == Env::Development {
        // DebugMiddleware is defined below to print logs for each request.
        m.add(DebugMiddleware);
        m.around(local_upload::Middleware::default());
    }
    if env != Env::Test {
        m.add(conduit_log_requests::LogRequests(log::LogLevel::Info));
    }
    m.around(util::Head::default());
    m.add(conduit_conditional_get::ConditionalGet);
    m.add(conduit_cookie::Middleware::new());
    // Signed session cookie; the `secure` flag is only set in production.
    m.add(conduit_cookie::SessionMiddleware::new(
        "cargo_session",
        cookie::Key::from_master(app.session_key.as_bytes()),
        env == Env::Production,
    ));
    if env == Env::Production {
        m.add(http::SecurityHeadersMiddleware::new(&app.config.uploader));
    }
    m.add(app::AppMiddleware::new(app));
    // Sets the current user on each request.
    m.add(user::Middleware);
    // Serve the static files in the *dist* directory, which are the frontend assets.
    // Not needed for the backend tests.
    if env != Env::Test {
        m.around(dist::Middleware::default());
    }
    return m;

    // Items below are compile-time definitions, so placing them after
    // `return` is valid; they are only referenced in development mode above.
    struct DebugMiddleware;

    impl conduit_middleware::Middleware for DebugMiddleware {
        // Logs the incoming request line, headers, and connection details.
        fn before(&self, req: &mut conduit::Request) -> Result<(), Box<Error + Send>> {
            println!("  version: {}", req.http_version());
            println!("  method: {:?}", req.method());
            println!("  scheme: {:?}", req.scheme());
            println!("  host: {:?}", req.host());
            println!("  path: {}", req.path());
            println!("  query_string: {:?}", req.query_string());
            println!("  remote_addr: {:?}", req.remote_addr());
            for &(k, ref v) in &req.headers().all() {
                println!("  hdr: {}={:?}", k, v);
            }
            Ok(())
        }
        // Logs the outgoing status and response headers; errors pass through.
        fn after(
            &self,
            _req: &mut conduit::Request,
            res: Result<conduit::Response, Box<Error + Send>>,
        ) -> Result<conduit::Response, Box<Error + Send>> {
            res.map(|res| {
                println!("  <- {:?}", res.status);
                for (k, v) in &res.headers {
                    println!("  <- {} {:?}", k, v);
                }
                res
            })
        }
    }
}
/// Convenience function for getting the current server time in UTC.
pub fn now() -> time::Timespec {
    let utc = time::now_utc();
    utc.to_timespec()
}
/// Convenience function for getting a time in RFC 3339 format.
///
/// Example: `2012-02-22T14:53:18Z`. Used for returning time values in JSON API responses.
pub fn encode_time(ts: time::Timespec) -> String {
    let utc = time::at_utc(ts);
    utc.rfc3339().to_string()
}
/// Convenience function requiring that an environment variable is set.
///
/// Ensures that we've initialized the dotenv crate in order to read environment variables
/// from a *.env* file if present. Don't use this for optionally set environment variables.
///
/// # Panics
///
/// Panics if the environment variable with the name passed in as an argument is not defined
/// in the current environment.
pub fn env(s: &str) -> String {
    // Load *.env* (if any) before consulting the process environment.
    dotenv::dotenv().ok();
    match ::std::env::var(s) {
        Ok(value) => value,
        Err(_) => panic!("must have `{}` defined", s),
    }
}
// Diesel binding for the SQL `lower()` function (Text -> Text), usable in queries.
sql_function!(lower, lower_t, (x: ::diesel::types::Text) -> ::diesel::types::Text);
Reorganize routes to group them by function
//! This crate implements the backend server for https://crates.io/
//!
//! All implemented routes are defined in the [middleware](fn.middleware.html) function and
//! implemented in the [category](category/index.html), [keyword](keyword/index.html),
//! [krate](krate/index.html), [user](user/index.html) and [version](version/index.html) modules.
#![deny(warnings)]
#![deny(missing_debug_implementations, missing_copy_implementations)]
#![cfg_attr(feature = "clippy", feature(plugin))]
#![cfg_attr(feature = "clippy", plugin(clippy))]
#![recursion_limit = "128"]
extern crate ammonia;
extern crate chrono;
extern crate comrak;
extern crate curl;
#[macro_use]
extern crate diesel;
#[macro_use]
extern crate diesel_codegen;
extern crate diesel_full_text_search;
extern crate dotenv;
extern crate flate2;
extern crate git2;
extern crate hex;
extern crate lettre;
extern crate license_exprs;
#[macro_use]
extern crate log;
extern crate oauth2;
extern crate openssl;
extern crate r2d2;
extern crate r2d2_diesel;
extern crate rand;
extern crate s3;
extern crate semver;
extern crate serde;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate serde_json;
extern crate tar;
extern crate time;
extern crate toml;
extern crate url;
extern crate conduit;
extern crate conduit_conditional_get;
extern crate conduit_cookie;
extern crate conduit_git_http_backend;
extern crate conduit_log_requests;
extern crate conduit_middleware;
extern crate conduit_router;
extern crate conduit_static;
extern crate cookie;
pub use app::App;
pub use self::badge::Badge;
pub use self::category::Category;
pub use config::Config;
pub use self::dependency::Dependency;
pub use self::download::VersionDownload;
pub use self::keyword::Keyword;
pub use self::krate::Crate;
pub use self::user::User;
pub use self::version::Version;
pub use self::uploaders::{Bomb, Uploader};
use std::sync::Arc;
use std::error::Error;
use conduit_router::RouteBuilder;
use conduit_middleware::MiddlewareBuilder;
use util::{R404, C, R};
pub mod app;
pub mod badge;
pub mod categories;
pub mod category;
pub mod config;
pub mod crate_owner_invitation;
pub mod db;
pub mod dependency;
pub mod dist;
pub mod download;
pub mod git;
pub mod http;
pub mod keyword;
pub mod krate;
pub mod owner;
pub mod render;
pub mod schema;
pub mod token;
pub mod upload;
pub mod uploaders;
pub mod user;
pub mod util;
pub mod version;
pub mod email;
mod local_upload;
mod pagination;
/// Used for setting different values depending on whether the app is being run in production,
/// in development, or for testing.
///
/// The app's `config.env` value is set in *src/bin/server.rs* to `Production` if the environment
/// variable `HEROKU` is set and `Development` otherwise. `config.env` is set to `Test`
/// unconditionally in *src/test/all.rs*.
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
pub enum Env {
    /// Local development: serves the git index checkout and adds DebugMiddleware.
    Development,
    /// Test harness: request logging and frontend asset serving are skipped.
    Test,
    /// Production deployment: secure session cookies and security headers enabled.
    Production,
}
/// Used for setting different values depending on the type of registry this instance is.
///
/// `Primary` indicates this instance is a primary registry that is the source of truth for these
/// crates' information. `ReadOnlyMirror` indicates this instanceis a read-only mirror of crate
/// information that exists on another instance.
///
/// The app's `config.mirror` value is set in *src/bin/server.rs* to `ReadOnlyMirror` if the
/// `MIRROR` environment variable is set and to `Primary` otherwise.
///
/// There may be more ways to run crates.io servers in the future, such as a
/// mirror that also has private crates that crates.io does not have.
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
pub enum Replica {
    /// This instance is the authoritative source of truth for crate data.
    Primary,
    /// This instance is a read-only mirror of crate data owned elsewhere.
    ReadOnlyMirror,
}
/// Configures routes, sessions, logging, and other middleware.
///
/// Called from *src/bin/server.rs*.
pub fn middleware(app: Arc<App>) -> MiddlewareBuilder {
    // All API endpoints are registered on this sub-router; it is mounted
    // under the /api/v1 prefix further below.
    let mut api_router = RouteBuilder::new();
    // Route used by both `cargo search` and the frontend
    api_router.get("/crates", C(krate::index));
    // Routes used by `cargo`
    api_router.put("/crates/new", C(krate::new));
    api_router.get("/crates/:crate_id/owners", C(krate::owners));
    api_router.put("/crates/:crate_id/owners", C(krate::add_owners));
    api_router.delete("/crates/:crate_id/owners", C(krate::remove_owners));
    api_router.delete("/crates/:crate_id/:version/yank", C(version::yank));
    api_router.put("/crates/:crate_id/:version/unyank", C(version::unyank));
    api_router.get("/crates/:crate_id/:version/download", C(krate::download));
    // Routes that appear to be unused
    api_router.get("/versions", C(version::index));
    api_router.get("/versions/:version_id", C(version::show));
    // Routes used by the frontend
    api_router.get("/crates/:crate_id", C(krate::show));
    api_router.get("/crates/:crate_id/:version", C(version::show));
    api_router.get("/crates/:crate_id/:version/readme", C(krate::readme));
    api_router.get(
        "/crates/:crate_id/:version/dependencies",
        C(version::dependencies),
    );
    api_router.get(
        "/crates/:crate_id/:version/downloads",
        C(version::downloads),
    );
    api_router.get("/crates/:crate_id/:version/authors", C(version::authors));
    api_router.get("/crates/:crate_id/downloads", C(krate::downloads));
    api_router.get("/crates/:crate_id/versions", C(krate::versions));
    api_router.put("/crates/:crate_id/follow", C(krate::follow));
    api_router.delete("/crates/:crate_id/follow", C(krate::unfollow));
    api_router.get("/crates/:crate_id/following", C(krate::following));
    api_router.get("/crates/:crate_id/owner_team", C(krate::owner_team));
    api_router.get("/crates/:crate_id/owner_user", C(krate::owner_user));
    api_router.get(
        "/crates/:crate_id/reverse_dependencies",
        C(krate::reverse_dependencies),
    );
    api_router.get("/keywords", C(keyword::index));
    api_router.get("/keywords/:keyword_id", C(keyword::show));
    api_router.get("/categories", C(category::index));
    api_router.get("/categories/:category_id", C(category::show));
    api_router.get("/category_slugs", C(category::slugs));
    api_router.get("/users/:user_id", C(user::show));
    api_router.put("/users/:user_id", C(user::update_user));
    api_router.get("/users/:user_id/stats", C(user::stats));
    api_router.get("/teams/:team_id", C(user::show_team));
    api_router.get("/me", C(user::me));
    api_router.get("/me/updates", C(user::updates));
    api_router.get("/me/tokens", C(token::list));
    api_router.post("/me/tokens", C(token::new));
    api_router.delete("/me/tokens/:id", C(token::revoke));
    api_router.get(
        "/me/crate_owner_invitations",
        C(crate_owner_invitation::list),
    );
    api_router.put(
        "/me/crate_owner_invitations/:crate_id",
        C(crate_owner_invitation::handle_invite),
    );
    api_router.get("/summary", C(krate::summary));
    api_router.put("/confirm/:email_token", C(user::confirm_user_email));
    api_router.put("/users/:user_id/resend", C(user::regenerate_token_and_send));
    // Unmatched /api/v1 paths fall through to the R404 handler.
    let api_router = Arc::new(R404(api_router));
    let mut router = RouteBuilder::new();
    // Mount the router under the /api/v1 path so we're at least somewhat at the
    // liberty to change things in the future!
    router.get("/api/v1/*path", R(api_router.clone()));
    router.put("/api/v1/*path", R(api_router.clone()));
    router.post("/api/v1/*path", R(api_router.clone()));
    router.head("/api/v1/*path", R(api_router.clone()));
    router.delete("/api/v1/*path", R(api_router));
    // OAuth/session endpoints live outside the /api/v1 namespace.
    router.get("/authorize_url", C(user::github_authorize));
    router.get("/authorize", C(user::github_access_token));
    router.delete("/logout", C(user::logout));
    // Only serve the local checkout of the git index in development mode.
    // In production, for crates.io, cargo gets the index from
    // https://github.com/rust-lang/crates.io-index directly.
    let env = app.config.env;
    if env == Env::Development {
        let s = conduit_git_http_backend::Serve(app.git_repo_checkout.clone());
        let s = Arc::new(s);
        router.get("/git/index/*path", R(s.clone()));
        router.post("/git/index/*path", R(s));
    }
    // Middleware is added in order: each `add`/`around` below wraps requests
    // before they reach the router.
    let mut m = MiddlewareBuilder::new(R404(router));
    if env == Env::Development {
        // DebugMiddleware is defined below to print logs for each request.
        m.add(DebugMiddleware);
        // Development-only upload handling; see the local_upload module.
        m.around(local_upload::Middleware::default());
    }
    if env != Env::Test {
        m.add(conduit_log_requests::LogRequests(log::LogLevel::Info));
    }
    m.around(util::Head::default());
    m.add(conduit_conditional_get::ConditionalGet);
    m.add(conduit_cookie::Middleware::new());
    // NOTE(review): the final boolean appears to mark the session cookie as
    // production-only (e.g. secure) — confirm against conduit_cookie's API.
    m.add(conduit_cookie::SessionMiddleware::new(
        "cargo_session",
        cookie::Key::from_master(app.session_key.as_bytes()),
        env == Env::Production,
    ));
    if env == Env::Production {
        m.add(http::SecurityHeadersMiddleware::new(&app.config.uploader));
    }
    m.add(app::AppMiddleware::new(app));
    // Sets the current user on each request.
    m.add(user::Middleware);
    // Serve the static files in the *dist* directory, which are the frontend assets.
    // Not needed for the backend tests.
    if env != Env::Test {
        m.around(dist::Middleware::default());
    }
    return m;
    // Request/response logger; only installed in development mode (above).
    struct DebugMiddleware;
    impl conduit_middleware::Middleware for DebugMiddleware {
        // Print each interesting piece of the incoming request.
        fn before(&self, req: &mut conduit::Request) -> Result<(), Box<Error + Send>> {
            println!(" version: {}", req.http_version());
            println!(" method: {:?}", req.method());
            println!(" scheme: {:?}", req.scheme());
            println!(" host: {:?}", req.host());
            println!(" path: {}", req.path());
            println!(" query_string: {:?}", req.query_string());
            println!(" remote_addr: {:?}", req.remote_addr());
            for &(k, ref v) in &req.headers().all() {
                println!(" hdr: {}={:?}", k, v);
            }
            Ok(())
        }
        // Print the response status and headers on the way out.
        fn after(
            &self,
            _req: &mut conduit::Request,
            res: Result<conduit::Response, Box<Error + Send>>,
        ) -> Result<conduit::Response, Box<Error + Send>> {
            res.map(|res| {
                println!(" <- {:?}", res.status);
                for (k, v) in &res.headers {
                    println!(" <- {} {:?}", k, v);
                }
                res
            })
        }
    }
}
/// Returns the current server time in UTC as a `time::Timespec`.
pub fn now() -> time::Timespec {
    let utc = time::now_utc();
    utc.to_timespec()
}
/// Formats a timestamp in RFC 3339 form (e.g. `2012-02-22T14:53:18Z`),
/// the representation used for time values in JSON API responses.
pub fn encode_time(ts: time::Timespec) -> String {
    let utc = time::at_utc(ts);
    format!("{}", utc.rfc3339())
}
/// Reads a required environment variable, panicking if it is unset.
///
/// Initializes the dotenv crate first so variables from a *.env* file (if
/// present) are visible. Don't use this for optionally set environment
/// variables.
///
/// # Panics
///
/// Panics if the variable named `s` is not defined in the current environment.
pub fn env(s: &str) -> String {
    dotenv::dotenv().ok();
    match ::std::env::var(s) {
        Ok(value) => value,
        Err(_) => panic!("must have `{}` defined", s),
    }
}
sql_function!(lower, lower_t, (x: ::diesel::types::Text) -> ::diesel::types::Text);
// ----
//! This crate provides (at this time) a single function, `take()`.
//!
//! `take()` allows for taking `T` out of a `&mut T`, doing anything with it including consuming it, and producing another `T` to put back in the `&mut T`.
//!
//! During `take()`, if a panic occurs, the entire process will be exited, as there's no valid `T` to put back into the `&mut T`.
//! Use `take_or_recover()` to replace the `&mut T` with a recovery value before continuing the panic.
//!
//! Contrast with `std::mem::replace()`, which allows for putting a different `T` into a `&mut T`, but requiring the new `T` to be available before being able to consume the old `T`.
use std::panic;
/// Allows use of a value pointed to by `&mut T` as though it was owned, as
/// long as a `T` is made available afterwards.
///
/// The closure must return a valid T.
/// # Important
/// Will exit the program (with status code 101) if the closure panics, since
/// there is no valid `T` left to restore into the reference.
pub fn take<T, F>(mut_ref: &mut T, closure: F)
    where F: FnOnce(T) -> T {
    use std::ptr;
    unsafe {
        // Move the value out; `mut_ref` now points at logically-dead memory
        // until we write a replacement back.
        let owned = ptr::read(mut_ref);
        // Catch unwinding from the closure: if it panics there is nothing
        // valid to restore, so abort the whole process instead.
        let replacement = match panic::catch_unwind(panic::AssertUnwindSafe(|| closure(owned))) {
            Ok(t) => t,
            Err(_) => ::std::process::exit(101),
        };
        ptr::write(mut_ref, replacement);
    }
}
#[test]
fn it_works() {
    // Foo's Drop impl prints which variant dies, so the test output shows
    // exactly when the taken value is consumed.
    #[derive(PartialEq, Eq, Debug)]
    enum Foo {A, B};
    impl Drop for Foo {
        fn drop(&mut self) {
            match *self {
                Foo::A => println!("Foo::A dropped"),
                Foo::B => println!("Foo::B dropped")
            }
        }
    }
    let mut slot = Foo::A;
    take(&mut slot, |old| {
        drop(old);
        Foo::B
    });
    assert_eq!(&slot, &Foo::B);
}
/// Allows use of a value pointed to by `&mut T` as though it was owned, as
/// long as a `T` is made available afterwards.
///
/// The closure must return a valid T.
/// # Important
/// If the closure panics, `recover` supplies the replacement value for the
/// `&mut T` and the original panic is then resumed.
pub fn take_or_recover<T, F, R>(mut_ref: &mut T, recover: R, closure: F)
    where F: FnOnce(T) -> T, R: FnOnce() -> T {
    use std::ptr;
    unsafe {
        let owned = ptr::read(mut_ref);
        match panic::catch_unwind(panic::AssertUnwindSafe(|| closure(owned))) {
            Ok(fresh) => ptr::write(mut_ref, fresh),
            Err(payload) => {
                // The closure panicked; build the fallback value. If even the
                // recovery closure panics there is nothing left to restore,
                // so exit the process.
                let fallback = match panic::catch_unwind(panic::AssertUnwindSafe(|| recover())) {
                    Ok(t) => t,
                    Err(_) => ::std::process::exit(101),
                };
                ptr::write(mut_ref, fallback);
                panic::resume_unwind(payload);
            }
        }
    }
}
use std::cell::Cell;
use std::marker::PhantomData;
/// A scope within which values may be temporarily moved out of `&mut T`
/// references, each leaving behind a `Hole` that must be refilled.
pub struct Scope<'s> {
    // Number of currently outstanding (unfilled) Holes in this scope.
    active_holes: Cell<usize>,
    // Ties the scope to the 's lifetime without storing any data.
    marker: PhantomData<Cell<&'s mut ()>>
}
impl<'s> Scope<'s> {
    /// Moves the value out of `mut_ref`, returning it together with a `Hole`
    /// that must be filled to restore a valid value.
    ///
    /// NOTE(review): `recovery` is stored in the returned Hole, but in this
    /// version `Hole`'s `Drop` impl panics instead of invoking it — confirm
    /// whether recovery-on-drop was intended here.
    pub fn take_and_recover<'c, 'm: 's, T: 'm, F: FnOnce() -> T>(&'c self, mut_ref: &'m mut T, recovery: F) -> (T, Hole<'c, 'm, T, F>) {
        use std::ptr;
        let t: T;
        let hole: Hole<'c, 'm, T, F>;
        let num_of_holes = self.active_holes.get();
        // Guard the counter against overflow before registering another hole.
        if num_of_holes == std::usize::MAX {
            panic!("Too many holes!");
        }
        self.active_holes.set(num_of_holes + 1);
        unsafe {
            // Move the value out; the slot stays logically empty until the
            // Hole is filled.
            t = ptr::read(mut_ref);
            hole = Hole {
                active_holes: &self.active_holes,
                hole: mut_ref,
                recovery: recovery
            };
        };
        (t, hole)
    }
    /// Like `take_and_recover`, but with a recovery function that always panics.
    pub fn take<'c, 'm: 's, T: 'm>(&'c self, mut_ref: &'m mut T) -> (T, Hole<'c, 'm, T, fn() -> T>) {
        fn panic<T>() -> T {
            panic!("Failed to recover a Hole!")
        }
        self.take_and_recover(mut_ref, panic)
    }
}
/// Runs `f` with a fresh `Scope`. Every Hole taken inside must be filled by
/// the time `f` returns (or unwinds); otherwise the process exits with
/// status 101, because the underlying locations would hold invalid values.
pub fn scope<'s, F, R>(f: F) -> R
    where F: FnOnce(&Scope<'s>) -> R {
    let tracker = Scope { active_holes: Cell::new(0), marker: PhantomData };
    let outcome = panic::catch_unwind(panic::AssertUnwindSafe(|| f(&tracker)));
    // Any unfilled hole means some &mut T still points at moved-out memory.
    if tracker.active_holes.get() != 0 {
        std::process::exit(101);
    }
    outcome.unwrap_or_else(|payload| panic::resume_unwind(payload))
}
#[must_use]
/// A pending obligation to restore a value into a `&mut T` that has been
/// temporarily moved out of. Must be `fill`ed; dropping it unfilled panics.
pub struct Hole<'c, 'm, T: 'm, F> {
    // Counter of outstanding holes, shared with the owning Scope.
    active_holes: &'c Cell<usize>,
    // The location that must be refilled with a valid T.
    hole: &'m mut T,
    // Recovery function (not used by this version's Drop impl).
    recovery: F,
}
impl<'c, 'm, T: 'm, F> Hole<'c, 'm, T, F> {
    /// Restores `t` into the borrowed location and discharges this hole.
    pub fn fill(mut self, t: T) {
        use std::mem;
        use std::ptr;
        // Write the replacement value back into the moved-out slot.
        unsafe {
            ptr::write(self.hole, t);
        }
        // One fewer outstanding hole in the owning scope.
        let outstanding = self.active_holes.get();
        self.active_holes.set(outstanding - 1);
        // Skip Drop: the obligation is discharged and Drop would panic.
        mem::forget(self);
    }
}
impl<'c, 'm, T: 'm, F> Drop for Hole<'c, 'm, T, F> {
    // Dropping an unfilled Hole is a bug: the target &mut T still points at
    // moved-out memory, so there is nothing safe to do here but panic.
    fn drop(&mut self) {
        panic!("An unfilled Hole was destructed!");
    }
}
#[test]
fn it_works_recover() {
    #[derive(PartialEq, Eq, Debug)]
    enum Foo {A, B};
    impl Drop for Foo {
        fn drop(&mut self) {
            match *self {
                Foo::A => println!("Foo::A dropped"),
                Foo::B => println!("Foo::B dropped")
            }
        }
    }
    // No panic occurs, so the recovery closure is never used here.
    let mut slot = Foo::A;
    take_or_recover(&mut slot, || Foo::A, |old| {
        drop(old);
        Foo::B
    });
    assert_eq!(&slot, &Foo::B);
}
#[test]
fn it_works_recover_panic() {
    #[derive(PartialEq, Eq, Debug)]
    enum Foo {A, B, C};
    impl Drop for Foo {
        fn drop(&mut self) {
            match *self {
                Foo::A => println!("Foo::A dropped"),
                Foo::B => println!("Foo::B dropped"),
                Foo::C => println!("Foo::C dropped")
            }
        }
    }
    // The closure panics, so the recovery value (Foo::C) must end up in
    // `slot` and the panic must propagate to the catch_unwind below.
    let mut slot = Foo::A;
    let outcome = panic::catch_unwind(panic::AssertUnwindSafe(|| {
        take_or_recover(&mut slot, || Foo::C, |old| {
            drop(old);
            panic!("panic");
            Foo::B
        });
    }));
    assert!(outcome.is_err());
    assert_eq!(&slot, &Foo::C);
}
#[test]
fn scope_based_take() {
    #[derive(Debug)]
    struct Foo;
    #[derive(Debug)]
    struct Bar {
        a: Foo,
        b: Foo
    }
    // Two disjoint fields can be taken from the same struct concurrently,
    // as long as both holes are filled before the scope closes.
    let mut pair = Bar { a: Foo, b: Foo };
    scope(|s| {
        let (first, first_hole) = s.take(&mut pair.a);
        let (second, second_hole) = s.take(&mut pair.b);
        // Imagine consuming first and second
        first_hole.fill(Foo);
        second_hole.fill(Foo);
    });
    println!("{:?}", &pair);
}
// Recovery code for scoped.
//! This crate provides (at this time) a single function, `take()`.
//!
//! `take()` allows for taking `T` out of a `&mut T`, doing anything with it including consuming it, and producing another `T` to put back in the `&mut T`.
//!
//! During `take()`, if a panic occurs, the entire process will be exited, as there's no valid `T` to put back into the `&mut T`.
//! Use `take_or_recover()` to replace the `&mut T` with a recovery value before continuing the panic.
//!
//! Contrast with `std::mem::replace()`, which allows for putting a different `T` into a `&mut T`, but requiring the new `T` to be available before being able to consume the old `T`.
use std::panic;
/// Temporarily takes ownership of the value behind `mut_ref`, passing it to
/// `closure` and writing the closure's result back.
///
/// The closure must return a valid T.
/// # Important
/// Exits the process with status code 101 if the closure panics, because the
/// referenced memory would otherwise be left without a valid value.
pub fn take<T, F>(mut_ref: &mut T, closure: F)
    where F: FnOnce(T) -> T {
    use std::ptr;
    unsafe {
        let taken = ptr::read(mut_ref);
        let result = panic::catch_unwind(panic::AssertUnwindSafe(|| closure(taken)));
        if let Ok(fresh) = result {
            ptr::write(mut_ref, fresh);
        } else {
            // No valid T exists to restore; bail out of the process entirely.
            ::std::process::exit(101);
        }
    }
}
#[test]
fn it_works() {
    #[derive(PartialEq, Eq, Debug)]
    enum Foo {A, B};
    impl Drop for Foo {
        fn drop(&mut self) {
            match *self {
                Foo::A => println!("Foo::A dropped"),
                Foo::B => println!("Foo::B dropped")
            }
        }
    }
    // The original A is consumed inside the closure and replaced by B.
    let mut cell = Foo::A;
    take(&mut cell, |previous| {
        drop(previous);
        Foo::B
    });
    assert_eq!(&cell, &Foo::B);
}
/// Temporarily takes ownership of the value behind `mut_ref`, passing it to
/// `closure` and writing the result back.
///
/// The closure must return a valid T.
/// # Important
/// If the closure panics, `recover` produces the replacement written into
/// `&mut T`, and the original panic is then resumed.
pub fn take_or_recover<T, F, R>(mut_ref: &mut T, recover: R, closure: F)
    where F: FnOnce(T) -> T, R: FnOnce() -> T {
    use std::ptr;
    unsafe {
        let taken = ptr::read(mut_ref);
        let attempt = panic::catch_unwind(panic::AssertUnwindSafe(|| closure(taken)));
        match attempt {
            Ok(fresh) => ptr::write(mut_ref, fresh),
            Err(cause) => {
                // The recovery closure itself may panic; in that case there
                // is no valid value left, so exit the process.
                let substitute = panic::catch_unwind(panic::AssertUnwindSafe(|| recover()))
                    .unwrap_or_else(|_| ::std::process::exit(101));
                ptr::write(mut_ref, substitute);
                panic::resume_unwind(cause);
            }
        }
    }
}
use std::cell::Cell;
use std::marker::PhantomData;
/// A scope within which values may be temporarily moved out of `&mut T`
/// references, each leaving behind a `Hole` that is either filled explicitly
/// or repaired by its recovery function on drop.
pub struct Scope<'s> {
    // Number of currently outstanding (unfilled) Holes in this scope.
    active_holes: Cell<usize>,
    // Ties the scope to the 's lifetime without storing any data.
    marker: PhantomData<Cell<&'s mut ()>>
}
impl<'s> Scope<'s> {
    /// Moves the value out of `mut_ref`, returning it together with a `Hole`
    /// that must be filled with a replacement. If the Hole is dropped
    /// unfilled, `recovery` is invoked (in Hole's Drop impl) to produce the
    /// replacement instead.
    pub fn take_and_recover<'c, 'm: 's, T: 'm, F: FnOnce() -> T>(&'c self, mut_ref: &'m mut T, recovery: F) -> (T, Hole<'c, 'm, T, F>) {
        use std::ptr;
        let t: T;
        let hole: Hole<'c, 'm, T, F>;
        let num_of_holes = self.active_holes.get();
        // Guard the counter against overflow before registering another hole.
        if num_of_holes == std::usize::MAX {
            panic!("Too many holes!");
        }
        self.active_holes.set(num_of_holes + 1);
        unsafe {
            // Move the value out; the slot stays logically empty until the
            // Hole is filled (or its recovery runs on drop).
            t = ptr::read(mut_ref);
            hole = Hole {
                active_holes: &self.active_holes,
                hole: mut_ref,
                recovery: Some(recovery)
            };
        };
        (t, hole)
    }
    /// Like `take_and_recover`, but the recovery function simply panics.
    pub fn take<'c, 'm: 's, T: 'm>(&'c self, mut_ref: &'m mut T) -> (T, Hole<'c, 'm, T, fn() -> T>) {
        fn panic<T>() -> T {
            panic!("Failed to recover a Hole!")
        }
        self.take_and_recover(mut_ref, panic)
    }
}
/// Runs `f` with a fresh `Scope`, enforcing that no Hole remains outstanding
/// once `f` has returned or unwound; otherwise the process exits with
/// status 101.
pub fn scope<'s, F, R>(f: F) -> R
    where F: FnOnce(&Scope<'s>) -> R {
    let guard = Scope { active_holes: Cell::new(0), marker: PhantomData };
    let outcome = panic::catch_unwind(panic::AssertUnwindSafe(|| f(&guard)));
    // A nonzero count means some &mut T still refers to moved-out memory.
    if guard.active_holes.get() != 0 {
        std::process::exit(101);
    }
    match outcome {
        Err(payload) => panic::resume_unwind(payload),
        Ok(value) => value,
    }
}
#[must_use]
/// A pending obligation to restore a value into a `&mut T` that has been
/// temporarily moved out of. Either `fill` it explicitly, or let Drop run
/// the recovery function to produce the replacement.
pub struct Hole<'c, 'm, T: 'm, F: FnOnce() -> T> {
    // Counter of outstanding holes, shared with the owning Scope.
    active_holes: &'c Cell<usize>,
    // The location that must be refilled with a valid T.
    hole: &'m mut T,
    // Recovery function; Some until consumed by Drop (fill forgets self).
    recovery: Option<F>,
}
impl<'c, 'm, T: 'm, F: FnOnce() -> T> Hole<'c, 'm, T, F> {
    /// Writes `t` back into the borrowed location and retires this hole
    /// without running its recovery-on-drop logic.
    pub fn fill(mut self, t: T) {
        use std::mem;
        use std::ptr;
        unsafe {
            ptr::write(self.hole, t);
        }
        // One fewer outstanding hole in the owning scope.
        let outstanding = self.active_holes.get();
        self.active_holes.set(outstanding - 1);
        // The obligation is discharged; skip Drop so recovery never runs.
        mem::forget(self);
    }
}
impl<'c, 'm, T: 'm, F: FnOnce() -> T> Drop for Hole<'c, 'm, T, F> {
    // If a Hole is dropped without being filled (e.g. the user's closure
    // panicked), run the recovery function and write its result into the
    // vacated slot so the &mut T holds a valid value again.
    fn drop(&mut self) {
        use std::ptr;
        use std::mem;
        // Catch panics from the recovery function itself so we control what
        // happens rather than unwinding straight out of drop.
        let result = panic::catch_unwind(panic::AssertUnwindSafe(||{
            (self.recovery.take().expect("No recovery function in Hole!"))()
        }));
        match result {
            Ok(t) => {
                unsafe {
                    ptr::write(self.hole, t);
                }
                let num_holes = self.active_holes.get();
                self.active_holes.set(num_holes - 1);
            },
            // NOTE(review): resuming a panic inside drop aborts the process
            // if this drop is itself running during an unwind — confirm this
            // is the intended behavior for a panicking recovery function.
            Err(p) => panic::resume_unwind(p)
        }
    }
}
#[test]
fn it_works_recover() {
    #[derive(PartialEq, Eq, Debug)]
    enum Foo {A, B};
    impl Drop for Foo {
        fn drop(&mut self) {
            match *self {
                Foo::A => println!("Foo::A dropped"),
                Foo::B => println!("Foo::B dropped")
            }
        }
    }
    // Happy path: the closure succeeds, so recovery is never invoked.
    let mut cell = Foo::A;
    take_or_recover(&mut cell, || Foo::A, |previous| {
        drop(previous);
        Foo::B
    });
    assert_eq!(&cell, &Foo::B);
}
#[test]
fn it_works_recover_panic() {
    #[derive(PartialEq, Eq, Debug)]
    enum Foo {A, B, C};
    impl Drop for Foo {
        fn drop(&mut self) {
            match *self {
                Foo::A => println!("Foo::A dropped"),
                Foo::B => println!("Foo::B dropped"),
                Foo::C => println!("Foo::C dropped")
            }
        }
    }
    // Panic path: the recovery value Foo::C must be installed and the panic
    // must reach the catch_unwind wrapper below.
    let mut cell = Foo::A;
    let caught = panic::catch_unwind(panic::AssertUnwindSafe(|| {
        take_or_recover(&mut cell, || Foo::C, |previous| {
            drop(previous);
            panic!("panic");
            Foo::B
        });
    }));
    assert!(caught.is_err());
    assert_eq!(&cell, &Foo::C);
}
#[test]
fn scope_based_take() {
    #[derive(Debug)]
    struct Foo;
    #[derive(Debug)]
    struct Bar {
        a: Foo,
        b: Foo
    }
    // Both fields of the same struct can be taken at once, provided each
    // hole is filled before the scope ends.
    let mut holder = Bar { a: Foo, b: Foo };
    scope(|s| {
        let (left, left_hole) = s.take(&mut holder.a);
        let (right, right_hole) = s.take(&mut holder.b);
        // Imagine consuming left and right
        left_hole.fill(Foo);
        right_hole.fill(Foo);
    });
    println!("{:?}", &holder);
}
// ----
//! A library to interface with Juju. For more information about Juju see
//! [Juju](https://jujucharms.com/docs/stable/about-juju)
//!
//! A hello world Juju charm example in Rust:
//! You will need a working Juju environment for this to function properly. See [Setting up Juju]
//! (https://jujucharms.com/docs/stable/getting-started). After Juju is functioning see
//! [What makes a Charm](https://jujucharms.com/docs/stable/authors-charm-components) for the base
//! components of a charm.
//!
//! Our src/main.rs will contain the following:
//! # Examples
//! ```
//! extern crate juju;
//! extern crate log;
//! use std::env;
//! use log::LogLevel;
//!
//! fn config_changed()->Result<(), String>{
//! juju::log("Hello Juju from Rust!", Some(LogLevel::Debug));
//! return Ok(());
//! }
//!
//! fn main(){
//! let mut hook_registry: Vec<juju::Hook> = Vec::new();
//!
//! //Register our hooks with the Juju library
//! hook_registry.push(juju::Hook{
//! name: "config-changed".to_string(),
//! callback: config_changed,
//! });
//! let result = juju::process_hooks(hook_registry);
//!
//! if result.is_err(){
//! juju::log(&format!("Hook failed with error: {:?}", result.err()), Some(LogLevel::Error));
//! }else{
//! juju::log("Hook call was successful!", Some(LogLevel::Debug));
//! }
//! }
//! ```
//! Now you can build with `cargo build ` and install the binary in the hooks directory.
//!
//! Create a symlink in the hooks directory with `ln -s hello-world config-changed`. Juju will
//! attempt to run that symlink and our Juju library will map that to our config_changed function.
//!
//! We can test our hello-world charm by deploying with juju and watching the debug logs. See
//! [Deploying a Charm](https://jujucharms.com/docs/stable/charms-deploying) for more information.
//!
//! You should see a message in juju debug-log like this `unit-hello-world-0[6229]: 2015-08-21 16:16:05 INFO unit.hello-world/0.juju-log server.go:254 Hello Juju from Rust!`
//!
extern crate charmhelpers;
extern crate log;
use std::collections::HashMap;
use std::env;
use std::error::Error;
use std::str::FromStr;
use std::net::IpAddr;
use std::io;
use log::LogLevel;
pub use charmhelpers::core::hookenv::{log};
//Custom error handling for the library
#[derive(Debug)]
/// Unified error type wrapping the failure modes of the Juju CLI helpers.
pub enum JujuError{
    /// I/O failure; also used to carry command stderr text (see `JujuError::new`).
    IoError(io::Error),
    /// Command output was not valid UTF-8.
    FromUtf8Error(std::string::FromUtf8Error),
    /// A numeric field failed to parse.
    ParseIntError(std::num::ParseIntError),
    /// A required environment variable was missing or invalid.
    VarError(std::env::VarError),
    /// An address (e.g. from `unit-get`) failed to parse as an IP.
    AddrParseError(std::net::AddrParseError),
}
impl JujuError{
    /// Wraps an arbitrary message as an `IoError` of kind `Other`.
    fn new(err: String) -> JujuError {
        let io_err = io::Error::new(std::io::ErrorKind::Other, err);
        JujuError::IoError(io_err)
    }
    /// Returns the human-readable description of the underlying error.
    pub fn to_string(&self) -> String{
        let description = match *self {
            JujuError::IoError(ref err) => err.description(),
            JujuError::FromUtf8Error(ref err) => err.description(),
            JujuError::ParseIntError(ref err) => err.description(),
            JujuError::VarError(ref err) => err.description(),
            JujuError::AddrParseError(ref err) => err.description(),
        };
        description.to_string()
    }
}
impl From<io::Error> for JujuError {
fn from(err: io::Error) -> JujuError {
JujuError::IoError(err)
}
}
impl From<std::string::FromUtf8Error> for JujuError {
fn from(err: std::string::FromUtf8Error) -> JujuError {
JujuError::FromUtf8Error(err)
}
}
impl From<std::num::ParseIntError> for JujuError {
fn from(err: std::num::ParseIntError) -> JujuError {
JujuError::ParseIntError(err)
}
}
impl From<std::env::VarError> for JujuError {
fn from(err: std::env::VarError) -> JujuError {
JujuError::VarError(err)
}
}
impl From<std::net::AddrParseError> for JujuError {
fn from(err: std::net::AddrParseError) -> JujuError {
JujuError::AddrParseError(err)
}
}
#[derive(Debug)]
/// Network transport protocol for the open-port/close-port helpers.
pub enum Transport {
    /// Serialized as "tcp" on the command line.
    Tcp,
    /// Serialized as "udp" on the command line.
    Udp,
}
impl Transport {
    /// Returns the lowercase command-line representation of the variant.
    fn to_string(self) -> String {
        let name = match self {
            Transport::Tcp => "tcp",
            Transport::Udp => "udp",
        };
        name.to_string()
    }
}
#[derive(Debug)]
/// For information about what these StatusType variants mean see: [Status reference]
/// (https://jujucharms.com/docs/stable/reference-status)
pub enum StatusType{
    /// Serialized as "maintenance".
    Maintenance,
    /// Serialized as "waiting".
    Waiting,
    /// Serialized as "active".
    Active,
    /// Serialized as "blocked".
    Blocked
}
impl StatusType {
    /// Returns the lowercase command-line representation of the variant.
    pub fn to_string(self) -> String {
        let text = match self {
            StatusType::Maintenance => "maintenance",
            StatusType::Waiting => "waiting",
            StatusType::Active => "active",
            StatusType::Blocked => "blocked",
        };
        text.to_string()
    }
}
#[derive(Debug)]
/// A status variant paired with a human-readable message.
pub struct Status{
    /// The type of status
    pub status_type: StatusType,
    /// A message to show alongside the status
    pub message: String,
}
#[derive(Debug)]
/// Snapshot of the Juju hook execution environment.
pub struct Context{
    /// The scope for the current relation hook
    pub relation_type: String,
    /// The relation ID for the current relation hook
    pub relation_id: usize,
    /// Local unit ID
    pub unit: String,
    /// relation data for all related units
    pub relations: HashMap<String,String>,
}
impl Context{
    ///Constructs a new `Context`
    ///Creates a context that's filled out from the env variables
    ///(`JUJU_RELATION`, `JUJU_RELATION_ID`, `JUJU_UNIT_NAME`).
    ///
    ///Missing variables default to empty values, and a missing or malformed
    ///`JUJU_RELATION_ID` yields a `relation_id` of 0. (The previous code did
    ///`parts[1].parse().unwrap()`, which panicked with an index-out-of-bounds
    ///whenever the variable was unset or contained no `:` — e.g. when called
    ///outside of a relation hook.)
    /// # Example usage
    /// ```
    /// extern crate juju;
    /// let context = juju::Context::new_from_env();
    /// ```
    pub fn new_from_env() -> Context{
        let relations: HashMap<String,String> = HashMap::new();
        //This variable is useless. It only shows "server" for everything
        let relation_type = env::var("JUJU_RELATION").unwrap_or("".to_string());
        // JUJU_RELATION_ID looks like "<relation name>:<numeric id>"; take the
        // numeric part, falling back to 0 when absent or unparsable.
        let relation_id_str = env::var("JUJU_RELATION_ID").unwrap_or("".to_string());
        let relation_id: usize = relation_id_str
            .split(":")
            .nth(1)
            .and_then(|id| id.parse::<usize>().ok())
            .unwrap_or(0);
        let unit = env::var("JUJU_UNIT_NAME").unwrap_or("".to_string());
        Context{
            relation_type: relation_type,
            relation_id: relation_id,
            unit: unit,
            relations: relations,
        }
    }
}
#[derive(Debug)]
/// Identifies a related unit by name and numeric relation id.
pub struct Relation {
    /// The name of a unit related to your service
    pub name: String,
    /// The id of the unit related to your service
    pub id: usize
}
/// Maps a Juju hook name to the Rust callback that services it.
pub struct Hook {
    /// The name of the hook to call
    pub name: String,
    /// A function to call when Juju calls this hook
    /// # Failures
    /// Your function passed in needs to return a String on error so that users will
    /// know what happened. Ideally this should also be logged with juju::log
    pub callback: fn() -> Result<(),String>,
}
/// Returns 0 if the process completed successfully.
/// #Failures
/// Returns a String of the stderr if the process failed to execute
fn process_output(output: std::process::Output)->Result<i32, JujuError>{
    if output.status.success(){
        Ok(0)
    }else{
        // Surface the command's stderr as the error message.
        let stderr = try!(String::from_utf8(output.stderr));
        Err(JujuError::new(stderr))
    }
}
/// This will reboot your juju instance. Examples of using this are when a new kernel is installed
/// and the virtual machine or server needs to be rebooted to use it.
/// # Failures
/// Returns stderr if the reboot command fails
pub fn reboot()->Result<i32,JujuError>{
    let result = try!(run_command_no_args("juju-reboot", true));
    process_output(result)
}
/// action_get gets the value of the parameter at the given key
/// See [Juju Actions](https://jujucharms.com/docs/devel/authors-charm-actions) for more information
/// # Failures
/// Returns stderr if the action_get command fails
pub fn action_get(key: &str) -> Result<String,JujuError>{
    let args = vec![key.to_string()];
    let output = try!(run_command("action-get", &args, false));
    let value = try!(String::from_utf8(output.stdout));
    Ok(value.trim().to_string())
}
/// Get the name of the currently executing action
/// # Failures
/// Returns JujuError if the environment variable JUJU_ACTION_NAME does not exist
pub fn action_name() -> Result<String,JujuError>{
    Ok(try!(env::var("JUJU_ACTION_NAME")))
}
/// Get the uuid of the currently executing action
/// # Failures
/// Returns JujuError if the environment variable JUJU_ACTION_UUID does not exist
pub fn action_uuid() -> Result<String,JujuError>{
    Ok(try!(env::var("JUJU_ACTION_UUID")))
}
/// Get the tag of the currently executing action
/// # Failures
/// Returns JujuError if the environment variable JUJU_ACTION_TAG does not exist
pub fn action_tag() -> Result<String,JujuError>{
    Ok(try!(env::var("JUJU_ACTION_TAG")))
}
/// action_set permits the Action to set results in a map to be returned at completion of the Action.
/// See [Juju Actions](https://jujucharms.com/docs/devel/authors-charm-actions) for more information
/// # Failures
/// Returns stderr if the action_set command fails
pub fn action_set(key: &str, value: &str) -> Result<i32,JujuError>{
    let args = vec![format!("{}={}", key, value)];
    let output = try!(run_command("action-set", &args, false));
    process_output(output)
}
/// See [Juju Actions](https://jujucharms.com/docs/devel/authors-charm-actions) for more information
/// # Failures
/// Returns stderr if the action_fail command fails
pub fn action_fail(msg: &str) -> Result<i32, JujuError>{
    let args = vec![msg.to_string()];
    let output = try!(run_command("action-fail", &args, false));
    process_output(output)
}
/// This will return the private IP address associated with the unit.
/// It can be very useful for services that require communicating with the other units related
/// to it.
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn unit_get_private_addr() ->Result<IpAddr, JujuError>{
    let args = vec!["private-address".to_string()];
    let output = try!(run_command("unit-get", &args, false));
    let addr_text = try!(String::from_utf8(output.stdout));
    Ok(try!(IpAddr::from_str(addr_text.trim())))
}
/// This will return the public IP address associated with the unit.
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn unit_get_public_addr() ->Result<IpAddr, JujuError>{
    let args = vec!["public-address".to_string()];
    let output = try!(run_command("unit-get", &args, false));
    let addr_text = try!(String::from_utf8(output.stdout));
    Ok(try!(IpAddr::from_str(addr_text.trim())))
}
/// This will return a configuration item that corresponds to the key passed in
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn config_get(key: &str) ->Result<String, JujuError>{
    let args = vec![key.to_string()];
    let output = try!(run_command("config-get", &args, false));
    let text = try!(String::from_utf8(output.stdout));
    Ok(text.trim().to_string())
}
/// config_get_all will return all configuration options as a HashMap<String,String>
/// # Failures
/// Returns a String of if the configuration options are not able to be transformed into a HashMap
pub fn config_get_all() -> Result<HashMap<String,String>, JujuError>{
let mut values: HashMap<String,String> = HashMap::new();
let arg_list: Vec<String> = vec!["--all".to_string()];
let output = try!(run_command("config-get", &arg_list, false));
let output_str = try!(String::from_utf8(output.stdout));
/* Example output:
"brick_paths: /mnt/brick1 /mnt/brick2\ncluster_type: Replicate\n"
*/
//For each line split at : and load the parts into the HashMap
for line in output_str.lines(){
let parts: Vec<&str> = line.split(":").filter(|s| !s.is_empty()).collect::<Vec<&str>>();
if ! parts.len() == 2{
//Skipping this possibly bogus value
continue;
}
let key = match parts.get(0){
Some(key) => key,
None => {
return Err(JujuError::new(
format!("Unable to get key from config-get from parts: {:?}", parts)));
}
};
let value = match parts.get(1){
Some(value) => value,
None => {
return Err(JujuError::new(
format!("Unable to get value from config-get from parts: {:?}", parts)));
}
};
values.insert(key.to_string(), value.to_string());
}
return Ok(values);
}
/// This will expose a port on the unit. The transport argument will indicate whether tcp or udp
/// should be exposed
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn open_port(port: usize, transport: Transport) -> Result<i32, JujuError> {
    // juju expects a single "port/protocol" token, e.g. "80/tcp".
    let args = vec![format!("{}/{}", port, transport.to_string())];
    let output = try!(run_command("open-port", &args, false));
    process_output(output)
}
/// This will hide a port on the unit. The transport argument will indicate whether tcp or udp
/// should be exposed
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn close_port(port: usize, transport: Transport) -> Result<i32, JujuError> {
    // juju expects a single "port/protocol" token, e.g. "80/tcp".
    let args = vec![format!("{}/{}", port, transport.to_string())];
    let output = try!(run_command("close-port", &args, false));
    process_output(output)
}
/// Set relation information for the current unit
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn relation_set(key: &str, value: &str) -> Result<i32, JujuError> {
    // relation-set takes "key=value" tokens.
    let args = vec![format!("{}={}", key, value)];
    let output = try!(run_command("relation-set", &args, false));
    process_output(output)
}
/// Sets relation information using a specific relation ID. Used outside of relation hooks
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn relation_set_by_id(key: &str, value: &str, id: &Relation) -> Result<String,JujuError>{
    let mut arg_list: Vec<String> = Vec::new();
    // BUG FIX: "-r" and the relation id must be separate argv entries;
    // std::process::Command does no shell word-splitting, so the old single
    // "-r name:id" string reached the tool as one bogus argument.
    arg_list.push("-r".to_string());
    arg_list.push(format!("{}:{}", id.name, id.id));
    arg_list.push(format!("{}={}", key, value));
    // BUG FIX: this function previously invoked "relation-get", so it read
    // relation data instead of setting it.
    let output = try!(run_command("relation-set", &arg_list, false));
    let relation = try!(String::from_utf8(output.stdout));
    return Ok(relation);
}
/// Get relation information for the current unit
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn relation_get(key: &str) -> Result<String, JujuError> {
    let args = vec![key.to_string()];
    let output = try!(run_command("relation-get", &args, false));
    Ok(try!(String::from_utf8(output.stdout)))
}
/// Get relation information for a specific unit
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn relation_get_by_unit(key: &str, unit: &Relation) -> Result<String, JujuError> {
    // relation-get <key> <unit-name>/<unit-id>
    let args = vec![
        key.to_string(),
        format!("{}/{}", unit.name, unit.id),
    ];
    let output = try!(run_command("relation-get", &args, false));
    Ok(try!(String::from_utf8(output.stdout)))
}
/// Get relation information using a specific relation ID. Used outside of relation hooks
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn relation_get_by_id(key: &str, id: &Relation) -> Result<String,JujuError>{
    let mut arg_list: Vec<String> = Vec::new();
    // BUG FIX: "-r" and the relation id must be separate argv entries;
    // std::process::Command does no shell word-splitting, so the old single
    // "-r name:id" string reached relation-get as one bogus argument.
    arg_list.push("-r".to_string());
    arg_list.push(format!("{}:{}", id.name, id.id));
    arg_list.push(key.to_string());
    let output = try!(run_command("relation-get", &arg_list, false));
    let relation = try!(String::from_utf8(output.stdout));
    return Ok(relation);
}
/// Returns a list of all related units
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn relation_list() ->Result<Vec<Relation>, JujuError>{
    let mut related_units: Vec<Relation> = Vec::new();
    let output = try!(run_command_no_args("relation-list", false));
    let output_str = try!(String::from_utf8(output.stdout));
    log(&format!("relation-list output: {}", output_str), Some(LogLevel::Debug));
    // Each output line looks like "unit-name/<id>"; split on '/' to recover
    // the unit name and its numeric id.
    for line in output_str.lines(){
        let v: Vec<&str> = line.split('/').collect();
        // NOTE(review): v[1] panics on a line without '/'; this assumes juju
        // always emits "name/id" lines — confirm before hardening.
        let id: usize = try!(v[1].parse::<usize>());
        let r: Relation = Relation{
            name: v[0].to_string(),
            id: id,
        };
        related_units.push(r);
    }
    return Ok(related_units);
}
/// Returns the relation ids visible to this unit as `Relation` values.
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn relation_ids() ->Result<Vec<Relation>, JujuError>{
    let mut related_units: Vec<Relation> = Vec::new();
    let output = try!(run_command_no_args("relation-ids", false));
    let output_str: String = try!(String::from_utf8(output.stdout));
    log(&format!("relation-ids output: {}", output_str), Some(LogLevel::Debug));
    // Each output line looks like "relation-name:<id>"; split on ':' to
    // recover the name and numeric id.
    for line in output_str.lines(){
        let v: Vec<&str> = line.split(':').collect();
        // NOTE(review): v[1] panics on a line without ':'; assumes juju
        // always emits "name:id" lines — confirm before hardening.
        let id: usize = try!(v[1].parse::<usize>());
        let r: Relation = Relation{
            name: v[0].to_string(),
            id: id,
        };
        related_units.push(r);
    }
    return Ok(related_units);
}
/// Gets the relation IDs by their identifier
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn relation_ids_by_identifier(id: &str) ->Result<Vec<Relation>, JujuError>{
    let mut related_units: Vec<Relation> = Vec::new();
    let mut arg_list: Vec<String> = Vec::new();
    // The identifier (e.g. "database") narrows relation-ids to one relation
    // name.
    arg_list.push(id.to_string());
    let output = try!(run_command("relation-ids", &arg_list, false));
    let output_str: String = try!(String::from_utf8(output.stdout));
    log(&format!("relation-ids output: {}", output_str), Some(LogLevel::Debug));
    // Each output line looks like "relation-name:<id>".
    for line in output_str.lines(){
        let v: Vec<&str> = line.split(':').collect();
        // NOTE(review): v[1] panics on a line without ':' — confirm juju's
        // output format before hardening.
        let id: usize = try!(v[1].parse::<usize>());
        let r: Relation = Relation{
            name: v[0].to_string(),
            id: id,
        };
        related_units.push(r);
    }
    return Ok(related_units);
}
/// Set the status of your unit to indicate to the Juju if everything is ok or something is wrong.
/// See the Status enum for information about what can be set.
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn status_set(status: Status) -> Result<i32, JujuError> {
    // status-set <status-type> <message>
    let args = vec![status.status_type.to_string(), status.message];
    let output = try!(run_command("status-set", &args, false));
    process_output(output)
}
/// Retrieve the previously set juju workload state
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn status_get() -> Result<String, JujuError> {
    let output = try!(run_command_no_args("status-get", false));
    let text = try!(String::from_utf8(output.stdout));
    Ok(text)
}
/// If storage drives were allocated to your unit this will get the path of them.
/// In the storage-attaching hook this will tell you the location where the storage
/// is attached to. IE: /dev/xvdf for block devices or /mnt/{name} for filesystem devices
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn storage_get_location() -> Result<String, JujuError> {
    let args = vec!["location".to_string()];
    let output = try!(run_command("storage-get", &args, false));
    let location = try!(String::from_utf8(output.stdout));
    Ok(location)
}
/// Return the location of the mounted storage device. The mounted
/// storage devices can be gotten by calling storage_list() and
/// then passed into this function to get their mount location.
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn storage_get(name: &str) -> Result<String, JujuError> {
    // storage-get -s <name> location
    let args = vec![
        "-s".to_string(),
        name.to_string(),
        "location".to_string(),
    ];
    let output = try!(run_command("storage-get", &args, false));
    let location = try!(String::from_utf8(output.stdout));
    Ok(location)
}
/// Used to list storage instances that are attached to the unit.
/// The names returned may be passed through to storage_get
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn storage_list() -> Result<String, JujuError> {
    let output = try!(run_command_no_args("storage-list", false));
    let listing = try!(String::from_utf8(output.stdout));
    Ok(listing)
}
/// Call this to process your cmd line arguments and call any needed hooks
/// # Examples
/// ```
/// extern crate juju;
/// extern crate log;
/// use std::env;
///
/// fn config_changed()->Result<(), String>{
///     //Do nothing
///     return Ok(());
/// }
///
/// let mut hook_registry: Vec<juju::Hook> = Vec::new();
///
/// //Register our hooks with the Juju library
/// hook_registry.push(juju::Hook{
///     name: "config-changed".to_string(),
///     callback: config_changed,
/// });
/// let result = juju::process_hooks(hook_registry);
///
/// if result.is_err(){
///     juju::log(&format!("Hook failed with error: {:?}", result.err()), Some(log::LogLevel::Error));
/// }
/// ```
///
pub fn process_hooks(registry: Vec<Hook>)->Result<(),String>{
    // Resolve the name of the hook currently being executed; falls back to
    // the empty string when charmhelpers cannot determine it.
    let hook_name = match charmhelpers::core::hookenv::hook_name() {
        Some(s) => s,
        _ => "".to_string(),
    };
    for hook in registry {
        // NOTE(review): `contains` is a substring match, so a registered
        // "config" hook would also fire for "config-changed" — confirm this
        // looseness is intentional.  The first match wins.
        if hook_name.contains(&hook.name) {
            return (hook.callback)();
        }
    }
    return Err(format!("Warning: Unknown callback for hook {}", hook_name));
}
/// Returns true/false if this unit is the leader
/// # Failures
/// Will return stderr as a String if the function fails to run
/// # Examples
/// ```
/// extern crate juju;
/// let leader = match juju::is_leader(){
///     Ok(l) => l,
///     Err(e) => {
///         println!("Failed to run. Error was {:?}", e);
///         //Bail
///         return;
///     },
/// };
/// if leader{
///     println!("I am the leader!");
/// }else{
///     println!("I am not the leader. Maybe later I will be promoted");
/// }
/// ```
///
pub fn is_leader() -> Result<bool, JujuError> {
    let output = try!(run_command_no_args("is-leader", false));
    let text = try!(String::from_utf8(output.stdout));
    // is-leader prints "True" or "False"; anything unexpected maps to false,
    // exactly as the explicit three-arm match did.
    Ok(text.trim() == "True")
}
/// Runs `command` with no extra arguments and captures its output.  When
/// `as_root` is set the command is executed through sudo.
/// # Failures
/// Returns a JujuError wrapping the io::Error if the process cannot be spawned
fn run_command_no_args(command: &str, as_root: bool)-> Result<std::process::Output, JujuError>{
    if as_root{
        let mut cmd = std::process::Command::new("sudo");
        // BUG FIX: the command itself was never passed to sudo, so the root
        // branch executed a bare "sudo" and silently ignored `command`.
        cmd.arg(command);
        let output = try!(cmd.output());
        return Ok(output);
    }else{
        let mut cmd = std::process::Command::new(command);
        let output = try!(cmd.output());
        return Ok(output);
    }
}
/// Spawns `command` with each entry of `arg_list` as its own argv element (no
/// shell is involved) and captures the output.  With `as_root` the command is
/// run through sudo.
fn run_command(command: &str, arg_list: &Vec<String>, as_root: bool) -> Result<std::process::Output, JujuError> {
    let mut cmd = if as_root {
        let mut sudo = std::process::Command::new("sudo");
        sudo.arg(command);
        sudo
    } else {
        std::process::Command::new(command)
    };
    for arg in arg_list {
        cmd.arg(arg);
    }
    Ok(try!(cmd.output()))
}
// small tweaks
//! A library to interface with Juju. For more information about Juju see
//! [Juju](https://jujucharms.com/docs/stable/about-juju)
//!
//! A hello world Juju charm example in Rust:
//! You will need a working Juju environment for this to function properly. See [Setting up Juju]
//! (https://jujucharms.com/docs/stable/getting-started). After Juju is functioning see
//! [What makes a Charm](https://jujucharms.com/docs/stable/authors-charm-components) for the base
//! components of a charm.
//!
//! Our src/main.rs will contain the following:
//! # Examples
//! ```
//! extern crate juju;
//! extern crate log;
//! use std::env;
//! use log::LogLevel;
//!
//! fn config_changed()->Result<(), String>{
//! juju::log("Hello Juju from Rust!", Some(LogLevel::Debug));
//! return Ok(());
//! }
//!
//! fn main(){
//! let mut hook_registry: Vec<juju::Hook> = Vec::new();
//!
//! //Register our hooks with the Juju library
//! hook_registry.push(juju::Hook{
//! name: "config-changed".to_string(),
//! callback: config_changed,
//! });
//! let result = juju::process_hooks(hook_registry);
//!
//! if result.is_err(){
//! juju::log(&format!("Hook failed with error: {:?}", result.err()), Some(LogLevel::Error));
//! }else{
//! juju::log("Hook call was successful!", Some(LogLevel::Debug));
//! }
//! }
//! ```
//! Now you can build with `cargo build ` and install the binary in the hooks directory.
//!
//! Create a symlink in the hooks directory with `ln -s hello-world config-changed`. Juju will
//! attempt to run that symlink and our Juju library will map that to our config_changed function.
//!
//! We can test our hello-world charm by deploying with juju and watching the debug logs. See
//! [Deploying a Charm](https://jujucharms.com/docs/stable/charms-deploying) for more information.
//!
//! You should see a message in juju debug-log like this `unit-hello-world-0[6229]: 2015-08-21 16:16:05 INFO unit.hello-world/0.juju-log server.go:254 Hello Juju from Rust!`
//!
extern crate charmhelpers;
extern crate log;
use std::collections::HashMap;
use std::env;
use std::error::Error;
use std::str::FromStr;
use std::net::IpAddr;
use std::io;
use log::LogLevel;
pub use charmhelpers::core::hookenv::{log};
//Custom error handling for the library
#[derive(Debug)]
/// Unified error type for the library: every fallible juju call funnels its
/// underlying failure into one of these variants via the `From` impls below,
/// which lets `try!` propagate them transparently.
pub enum JujuError{
    // Process spawn / IO failures (also carries ad-hoc string errors, see `new`)
    IoError(io::Error),
    // Command output that was not valid UTF-8
    FromUtf8Error(std::string::FromUtf8Error),
    // Failed numeric parses (relation ids, etc.)
    ParseIntError(std::num::ParseIntError),
    // Missing or invalid environment variables
    VarError(std::env::VarError),
    // Unparsable IP addresses from unit-get
    AddrParseError(std::net::AddrParseError),
}
impl JujuError{
    // Wraps an arbitrary message as an IoError (ErrorKind::Other) so callers
    // can raise string-only errors without a dedicated variant.
    fn new(err: String) -> JujuError {
        JujuError::IoError(
            io::Error::new(std::io::ErrorKind::Other, err)
        )
    }
    /// Returns the human-readable description of the wrapped error.
    pub fn to_string(&self) -> String{
        match *self {
            JujuError::IoError(ref err) => err.description().to_string(),
            JujuError::FromUtf8Error(ref err) => err.description().to_string(),
            JujuError::ParseIntError(ref err) => err.description().to_string(),
            JujuError::VarError(ref err) => err.description().to_string(),
            JujuError::AddrParseError(ref err) => err.description().to_string(),
        }
    }
}
// Conversions that let `try!` automatically wrap each underlying error type
// into the matching JujuError variant.
impl From<io::Error> for JujuError {
    fn from(err: io::Error) -> JujuError {
        JujuError::IoError(err)
    }
}
impl From<std::string::FromUtf8Error> for JujuError {
    fn from(err: std::string::FromUtf8Error) -> JujuError {
        JujuError::FromUtf8Error(err)
    }
}
impl From<std::num::ParseIntError> for JujuError {
    fn from(err: std::num::ParseIntError) -> JujuError {
        JujuError::ParseIntError(err)
    }
}
impl From<std::env::VarError> for JujuError {
    fn from(err: std::env::VarError) -> JujuError {
        JujuError::VarError(err)
    }
}
impl From<std::net::AddrParseError> for JujuError {
    fn from(err: std::net::AddrParseError) -> JujuError {
        JujuError::AddrParseError(err)
    }
}
#[derive(Debug)]
/// The transport protocols open_port/close_port can expose.
pub enum Transport {
    Tcp,
    Udp,
}
impl Transport {
    /// Returns a String representation of the enum variant
    fn to_string(self) -> String {
        String::from(match self {
            Transport::Tcp => "tcp",
            Transport::Udp => "udp",
        })
    }
}
#[derive(Debug)]
/// For information about what these StatusType variants mean see: [Status reference]
/// (https://jujucharms.com/docs/stable/reference-status)
pub enum StatusType{
    Maintenance,
    Waiting,
    Active,
    Blocked
}
impl StatusType {
    /// Returns a String representation of the enum variant
    pub fn to_string(self) -> String {
        String::from(match self {
            StatusType::Maintenance => "maintenance",
            StatusType::Waiting => "waiting",
            StatusType::Active => "active",
            StatusType::Blocked => "blocked",
        })
    }
}
#[derive(Debug)]
/// A workload status to report to juju via `status_set`.
pub struct Status{
    /// The type of status
    pub status_type: StatusType,
    /// A message to show alongside the status
    pub message: String,
}
#[derive(Debug)]
/// Snapshot of the juju hook environment (see `Context::new_from_env`).
pub struct Context{
    /// The scope for the current relation hook
    pub relation_type: String,
    /// The relation ID for the current relation hook
    pub relation_id: usize,
    /// Local unit ID
    pub unit: String,
    /// relation data for all related units
    pub relations: HashMap<String,String>,
}
impl Context{
    ///Constructs a new `Context`
    ///Creates a context that's filled out from the env variables
    /// # Example usage
    /// ```
    /// extern crate juju;
    /// let context = juju::Context::new_from_env();
    /// ```
    pub fn new_from_env() -> Context{
        let relations: HashMap<String,String> = HashMap::new();
        //This variable is useless. It only shows "server" for everything
        let relation_type = env::var("JUJU_RELATION").unwrap_or("".to_string());
        let relation_id_str = env::var("JUJU_RELATION_ID").unwrap_or("".to_string());
        // BUG FIX: outside of a relation hook JUJU_RELATION_ID is unset, so
        // the old `parts[1].parse().unwrap()` panicked on the empty string.
        // Fall back to 0 when the "name:id" part is missing or unparsable.
        let relation_id: usize = relation_id_str
            .splitn(2, ':')
            .nth(1)
            .and_then(|s| s.parse::<usize>().ok())
            .unwrap_or(0);
        let unit = env::var("JUJU_UNIT_NAME").unwrap_or("".to_string());
        Context{
            relation_type: relation_type,
            relation_id: relation_id,
            unit: unit,
            relations: relations,
        }
    }
}
#[derive(Debug)]
/// A (name, numeric id) pair parsed from juju's relation tooling output.
pub struct Relation {
    /// The name of a unit related to your service
    pub name: String,
    /// The id of the unit related to your service
    pub id: usize
}
/// Registration entry consumed by `process_hooks`.
pub struct Hook {
    /// The name of the hook to call
    pub name: String,
    /// A function to call when Juju calls this hook
    /// # Failures
    /// Your function passed in needs to return a String on error so that users will
    /// know what happened. Ideally this should also be logged with juju::log
    pub callback: fn() -> Result<(),String>,
}
/// Returns 0 if the process completed successfully.
/// #Failures
/// Returns a String of the stderr if the process failed to execute
fn process_output(output: std::process::Output) -> Result<i32, JujuError> {
    if output.status.success() {
        Ok(0)
    } else {
        // Surface whatever the command wrote to stderr as the error message.
        let stderr = try!(String::from_utf8(output.stderr));
        Err(JujuError::new(stderr))
    }
}
/// This will reboot your juju instance. Examples of using this are when a new kernel is installed
/// and the virtual machine or server needs to be rebooted to use it.
/// # Failures
/// Returns stderr if the reboot command fails
pub fn reboot()->Result<i32,JujuError>{
let output = try!(run_command_no_args("juju-reboot", true));
return process_output(output);
}
/// action_get gets the value of the parameter at the given key
/// See [Juju Actions](https://jujucharms.com/docs/devel/authors-charm-actions) for more information
/// # Failures
/// Returns stderr if the action_get command fails
pub fn action_get(key: &str) -> Result<String,JujuError>{
let mut arg_list: Vec<String> = Vec::new();
arg_list.push(key.to_string());
let output = try!(run_command("action-get", &arg_list, false));
let value = try!(String::from_utf8(output.stdout));
return Ok(value.trim().to_string());
}
/// Get the name of the currently executing action
/// # Failures
/// Returns JujuError if the environment variable JUJU_ACTION_NAME does not exist
pub fn action_name() -> Result<String,JujuError>{
let name = try!(env::var("JUJU_ACTION_NAME"));
return Ok(name);
}
/// Get the uuid of the currently executing action
/// # Failures
/// Returns JujuError if the environment variable JUJU_ACTION_UUID does not exist
pub fn action_uuid() -> Result<String,JujuError>{
let uuid = try!(env::var("JUJU_ACTION_UUID"));
return Ok(uuid);
}
/// Get the tag of the currently executing action
/// # Failures
/// Returns JujuError if the environment variable JUJU_ACTION_TAG does not exist
pub fn action_tag() -> Result<String,JujuError>{
let tag = try!(env::var("JUJU_ACTION_TAG"));
return Ok(tag);
}
/// action_set permits the Action to set results in a map to be returned at completion of the Action.
/// See [Juju Actions](https://jujucharms.com/docs/devel/authors-charm-actions) for more information
/// # Failures
/// Returns stderr if the action_set command fails
pub fn action_set(key: &str, value: &str) -> Result<i32,JujuError>{
let mut arg_list: Vec<String> = Vec::new();
arg_list.push(format!("{}={}", key, value));
let output = try!(run_command("action-set", &arg_list, false));
return process_output(output);
}
/// See [Juju Actions](https://jujucharms.com/docs/devel/authors-charm-actions) for more information
/// # Failures
/// Returns stderr if the action_fail command fails
pub fn action_fail(msg: &str) -> Result<i32, JujuError>{
let mut arg_list: Vec<String> = Vec::new();
arg_list.push(msg.to_string());
let output = try!(run_command("action-fail", &arg_list, false));
return process_output(output);
}
/// This will return the private IP address associated with the unit.
/// It can be very useful for services that require communicating with the other units related
/// to it.
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn unit_get_private_addr() ->Result<IpAddr, JujuError>{
let mut arg_list: Vec<String> = Vec::new();
arg_list.push("private-address".to_string());
let output = try!(run_command("unit-get", &arg_list, false));
let private_addr: String = try!(String::from_utf8(output.stdout));
let ip = try!(IpAddr::from_str(private_addr.trim()));
return Ok(ip);
}
/// This will return the public IP address associated with the unit.
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn unit_get_public_addr() ->Result<IpAddr, JujuError>{
let mut arg_list: Vec<String> = Vec::new();
arg_list.push("public-address".to_string());
let output = try!(run_command("unit-get", &arg_list, false));
let public_addr = try!(String::from_utf8(output.stdout));
let ip = try!(IpAddr::from_str(public_addr.trim()));
return Ok(ip);
}
/// This will return a configuration item that corresponds to the key passed in
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn config_get(key: &str) ->Result<String, JujuError>{
let mut arg_list: Vec<String> = Vec::new();
arg_list.push(key.to_string());
let output = try!(run_command("config-get", &arg_list, false));
let value = try!(String::from_utf8(output.stdout));
return Ok(value.trim().to_string());
}
/// config_get_all will return all configuration options as a HashMap<String,String>
/// # Failures
/// Returns a String of if the configuration options are not able to be transformed into a HashMap
pub fn config_get_all() -> Result<HashMap<String,String>, JujuError>{
let mut values: HashMap<String,String> = HashMap::new();
let arg_list: Vec<String> = vec!["--all".to_string()];
let output = try!(run_command("config-get", &arg_list, false));
let output_str = try!(String::from_utf8(output.stdout));
/* Example output:
"brick_paths: /mnt/brick1 /mnt/brick2\ncluster_type: Replicate\n"
*/
//For each line split at : and load the parts into the HashMap
for line in output_str.lines(){
let parts: Vec<&str> = line.split(":").filter(|s| !s.is_empty()).collect::<Vec<&str>>();
if ! parts.len() == 2{
//Skipping this possibly bogus value
continue;
}
let key = match parts.get(0){
Some(key) => key,
None => {
return Err(JujuError::new(
format!("Unable to get key from config-get from parts: {:?}", parts)));
}
};
let value = match parts.get(1){
Some(value) => value,
None => {
return Err(JujuError::new(
format!("Unable to get value from config-get from parts: {:?}", parts)));
}
};
values.insert(key.to_string(), value.to_string());
}
return Ok(values);
}
/// This will expose a port on the unit. The transport argument will indicate whether tcp or udp
/// should be exposed
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn open_port(port: usize, transport: Transport)->Result<i32, JujuError>{
let mut arg_list: Vec<String> = Vec::new();
let port_string = format!("{}/{}", port.to_string(), transport.to_string());
arg_list.push(port_string);
let output = try!(run_command("open-port", &arg_list, false));
return process_output(output);
}
/// This will hide a port on the unit. The transport argument will indicate whether tcp or udp
/// should be exposed
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn close_port(port: usize, transport: Transport)->Result<i32, JujuError>{
let mut arg_list: Vec<String> = Vec::new();
let port_string = format!("{}/{}", port.to_string() , transport.to_string());
arg_list.push(port_string);
let output = try!(run_command("close-port", &arg_list, false));
return process_output(output);
}
/// Set relation information for the current unit
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn relation_set(key: &str, value: &str)->Result<i32, JujuError>{
let mut arg_list: Vec<String> = Vec::new();
let arg = format!("{}={}", key.clone(), value);
arg_list.push(arg);
let output = try!(run_command("relation-set", &arg_list, false));
return process_output(output);
}
/// Sets relation information using a specific relation ID. Used outside of relation hooks
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn relation_set_by_id(key: &str, value: &str, id: &Relation) -> Result<String,JujuError>{
    let mut arg_list: Vec<String> = Vec::new();
    // BUG FIX: "-r" and the relation id must be separate argv entries;
    // std::process::Command does no shell word-splitting, so the old single
    // "-r name:id" string reached the tool as one bogus argument.
    arg_list.push("-r".to_string());
    arg_list.push(format!("{}:{}", id.name, id.id));
    arg_list.push(format!("{}={}", key, value));
    // BUG FIX: this function previously invoked "relation-get", so it read
    // relation data instead of setting it.
    let output = try!(run_command("relation-set", &arg_list, false));
    let relation = try!(String::from_utf8(output.stdout));
    return Ok(relation);
}
/// Get relation information for the current unit
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn relation_get(key: &str) -> Result<String,JujuError>{
let mut arg_list: Vec<String> = Vec::new();
arg_list.push(key.to_string());
let output = try!(run_command("relation-get", &arg_list, false));
let value = try!(String::from_utf8(output.stdout));
return Ok(value);
}
/// Get relation information for a specific unit
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn relation_get_by_unit(key: &str, unit: &Relation) -> Result<String,JujuError>{
let mut arg_list: Vec<String> = Vec::new();
arg_list.push(key.to_string());
arg_list.push(format!("{}/{}", unit.name , unit.id.to_string()));
let output = try!(run_command("relation-get", &arg_list, false));
let relation = try!(String::from_utf8(output.stdout));
return Ok(relation);
}
/// Get relation information using a specific relation ID. Used outside of relation hooks
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn relation_get_by_id(key: &str, id: &Relation, unit: &Relation) -> Result<String,JujuError>{
    let mut arg_list: Vec<String> = Vec::new();
    // BUG FIX: the flag, relation id, key and unit were all packed into ONE
    // argv entry ("-r a:1 key u/0").  std::process::Command performs no shell
    // word-splitting, so the tool received a single bogus argument.  Pass
    // each token separately instead.
    arg_list.push("-r".to_string());
    arg_list.push(format!("{}:{}", id.name, id.id));
    arg_list.push(key.to_string());
    arg_list.push(format!("{}/{}", unit.name, unit.id));
    let output = try!(run_command("relation-get", &arg_list, false));
    let relation = try!(String::from_utf8(output.stdout));
    return Ok(relation);
}
/// Returns a list of all related units
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn relation_list() ->Result<Vec<Relation>, JujuError>{
let mut related_units: Vec<Relation> = Vec::new();
let output = try!(run_command_no_args("relation-list", false));
let output_str = try!(String::from_utf8(output.stdout));
log(&format!("relation-list output: {}", output_str), Some(LogLevel::Debug));
for line in output_str.lines(){
let v: Vec<&str> = line.split('/').collect();
let id: usize = try!(v[1].parse::<usize>());
let r: Relation = Relation{
name: v[0].to_string(),
id: id,
};
related_units.push(r);
}
return Ok(related_units);
}
/// Returns a list of all related units for the supplied identifier
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn relation_list_by_id(id: &Relation) ->Result<Vec<Relation>, JujuError>{
    let mut related_units: Vec<Relation> = Vec::new();
    let mut arg_list: Vec<String> = Vec::new();
    // BUG FIX: "-r" and the relation id must be separate argv entries; the
    // old single "-r name:id" string (embedded space) reached relation-list
    // as one bogus argument because Command does no shell word-splitting.
    arg_list.push("-r".to_string());
    arg_list.push(format!("{}:{}", id.name, id.id));
    let output = try!(run_command("relation-list", &arg_list, false));
    let output_str = try!(String::from_utf8(output.stdout));
    log(&format!("relation-list output: {}", output_str), Some(LogLevel::Debug));
    // Each output line looks like "unit-name/<id>".
    for line in output_str.lines(){
        let v: Vec<&str> = line.split('/').collect();
        let id: usize = try!(v[1].parse::<usize>());
        let r: Relation = Relation{
            name: v[0].to_string(),
            id: id,
        };
        related_units.push(r);
    }
    return Ok(related_units);
}
pub fn relation_ids() ->Result<Vec<Relation>, JujuError>{
let mut related_units: Vec<Relation> = Vec::new();
let output = try!(run_command_no_args("relation-ids", false));
let output_str: String = try!(String::from_utf8(output.stdout));
log(&format!("relation-ids output: {}", output_str), Some(LogLevel::Debug));
for line in output_str.lines(){
let v: Vec<&str> = line.split(':').collect();
let id: usize = try!(v[1].parse::<usize>());
let r: Relation = Relation{
name: v[0].to_string(),
id: id,
};
related_units.push(r);
}
return Ok(related_units);
}
/// Gets the relation IDs by their identifier
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn relation_ids_by_identifier(id: &str) -> Result<Vec<Relation>, JujuError>{
let mut related_units: Vec<Relation> = Vec::new();
let mut arg_list: Vec<String> = Vec::new();
arg_list.push(id.to_string());
let output = try!(run_command("relation-ids", &arg_list, false));
let output_str: String = try!(String::from_utf8(output.stdout));
log(&format!("relation-ids output: {}", output_str), Some(LogLevel::Debug));
for line in output_str.lines(){
let v: Vec<&str> = line.split(':').collect();
let id: usize = try!(v[1].parse::<usize>());
let r: Relation = Relation{
name: v[0].to_string(),
id: id,
};
related_units.push(r);
}
return Ok(related_units);
}
/// Set the status of your unit to indicate to the Juju if everything is ok or something is wrong.
/// See the Status enum for information about what can be set.
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn status_set(status: Status)->Result<i32,JujuError>{
let mut arg_list: Vec<String> = Vec::new();
arg_list.push(status.status_type.to_string());
arg_list.push(status.message);
let output = try!(run_command("status-set", &arg_list, false));
return process_output(output);
}
/// Retrieve the previously set juju workload state
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn status_get()->Result<String,JujuError>{
let output = try!(run_command_no_args("status-get", false));
return Ok(try!(String::from_utf8(output.stdout)));
}
/// If storage drives were allocated to your unit this will get the path of them.
/// In the storage-attaching hook this will tell you the location where the storage
/// is attached to. IE: /dev/xvdf for block devices or /mnt/{name} for filesystem devices
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn storage_get_location() ->Result<String, JujuError>{
let mut arg_list: Vec<String> = Vec::new();
arg_list.push("location".to_string());
let output = try!(run_command("storage-get", &arg_list, false));
return Ok(try!(String::from_utf8(output.stdout)));
}
/// Return the location of the mounted storage device. The mounted
/// storage devices can be gotten by calling storage_list() and
/// then passed into this function to get their mount location.
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn storage_get(name: &str) ->Result<String, JujuError>{
let mut arg_list: Vec<String> = Vec::new();
arg_list.push("-s".to_string());
arg_list.push(name.to_string());
arg_list.push("location".to_string());
let output = try!(run_command("storage-get", &arg_list, false));
return Ok(try!(String::from_utf8(output.stdout)));
}
/// Used to list storage instances that are attached to the unit.
/// The names returned may be passed through to storage_get
/// # Failures
/// Will return a String of the stderr if the call fails
pub fn storage_list() ->Result<String, JujuError>{
let output = try!(run_command_no_args("storage-list", false));
return Ok(try!(String::from_utf8(output.stdout)));
}
/// Call this to process your cmd line arguments and call any needed hooks
/// # Examples
/// ```
/// extern crate juju;
/// extern crate log;
/// use std::env;
///
/// fn config_changed()->Result<(), String>{
/// //Do nothing
/// return Ok(());
/// }
///
/// let mut hook_registry: Vec<juju::Hook> = Vec::new();
///
/// //Register our hooks with the Juju library
/// hook_registry.push(juju::Hook{
/// name: "config-changed".to_string(),
/// callback: config_changed,
/// });
/// let result = juju::process_hooks(hook_registry);
///
/// if result.is_err(){
/// juju::log(&format!("Hook failed with error: {:?}", result.err()), Some(log::LogLevel::Error));
/// }
/// ```
///
pub fn process_hooks(registry: Vec<Hook>)->Result<(),String>{
let hook_name = match charmhelpers::core::hookenv::hook_name() {
Some(s) => s,
_ => "".to_string(),
};
for hook in registry {
if hook_name.contains(&hook.name) {
return (hook.callback)();
}
}
return Err(format!("Warning: Unknown callback for hook {}", hook_name));
}
/// Returns true/false if this unit is the leader
/// # Failures
/// Will return stderr as a String if the function fails to run
/// # Examples
/// ```
/// extern crate juju;
/// let leader = match juju::is_leader(){
///     Ok(l) => l,
///     Err(e) => {
///         println!("Failed to run. Error was {:?}", e);
///         //Bail
///         return;
///     },
/// };
/// if leader{
///     println!("I am the leader!");
/// }else{
///     println!("I am not the leader. Maybe later I will be promoted");
/// }
/// ```
///
pub fn is_leader()->Result<bool, JujuError>{
    let output = try!(run_command_no_args("is-leader", false));
    let text = try!(String::from_utf8(output.stdout));
    // The `is-leader` tool prints Python-style booleans ("True"/"False").
    // Anything else is conservatively treated as "not the leader".
    Ok(text.trim() == "True")
}
/// Runs `command` with no extra arguments, optionally under `sudo`.
///
/// Returns the raw process output (stdout/stderr/exit status), or a
/// `JujuError` if the process could not be spawned.
fn run_command_no_args(command: &str, as_root: bool)-> Result<std::process::Output, JujuError>{
    if as_root{
        let mut cmd = std::process::Command::new("sudo");
        // BUG FIX: the command must be passed as an argument to sudo.
        // Previously this branch executed a bare `sudo` and silently
        // dropped `command` (compare `run_command`, which does pass it).
        cmd.arg(command);
        let output = try!(cmd.output());
        return Ok(output);
    }else{
        let mut cmd = std::process::Command::new(command);
        let output = try!(cmd.output());
        return Ok(output);
    }
}
/// Runs `command` with the given argument list, optionally under `sudo`.
///
/// Returns the raw process output, or a `JujuError` if spawning fails.
fn run_command(command: &str, arg_list: &Vec<String>, as_root: bool) -> Result<std::process::Output, JujuError>{
    // When running as root, `sudo` becomes the program and the real
    // command is passed as its first argument.
    let mut cmd = if as_root{
        let mut c = std::process::Command::new("sudo");
        c.arg(command);
        c
    }else{
        std::process::Command::new(command)
    };
    for arg in arg_list{
        cmd.arg(arg);
    }
    Ok(try!(cmd.output()))
}
|
/*!
Easy-to-use, high-level, OpenGL3+ wrapper.
# Initialization
This library defines the `DisplayBuild` trait which is currently implemented only on
`glutin::WindowBuilder`.
Initialization is done by creating a `WindowBuilder` and calling `build_glium`.
```no_run
extern crate glutin;
extern crate glium;
fn main() {
use glium::DisplayBuild;
let display = glutin::WindowBuilder::new()
.with_dimensions(1024, 768)
.with_title(format!("Hello world"))
.build_glium().unwrap();
}
```
The `display` object is the most important object of this library.
The window you are drawing on will produce events. They can be received by calling
`display.poll_events()`.
# Complete example
The first step is to create the vertex buffer, which contains the list of all the points that
make up our mesh. The elements that we pass to `VertexBuffer::new` must implement the
`glium::vertex::VertexFormat` trait, which can be easily added for any custom struct thanks to the
`#[vertex_format]` attribute.
See the `vertex` module documentation for more information.
```no_run
# #![feature(plugin)]
#[plugin]
extern crate glium_macros;
# extern crate glium;
# fn main() {
#[vertex_format]
#[derive(Copy)]
struct Vertex {
position: [f32; 2],
color: [f32; 3],
}
# let display: glium::Display = unsafe { std::mem::uninitialized() };
let vertex = glium::VertexBuffer::new(&display, vec![
Vertex { position: [-0.5, -0.5], color: [0.0, 1.0, 0.0] },
Vertex { position: [ 0.0, 0.5], color: [0.0, 0.0, 1.0] },
Vertex { position: [ 0.5, -0.5], color: [1.0, 0.0, 0.0] },
]);
# }
```
We will also need to tell glium how the vertices must be linked together. We could create an index
buffer, but since we only have a single triangle the simpler solution here is not to use indices.
```no_run
use glium::index;
let indices = index::NoIndices(index::PrimitiveType::TrianglesList);
```
Next, we create the program, which is composed of a *vertex shader*, a program executed once for
each element in our vertex buffer, and a *fragment shader*, a program executed once for each
pixel before it is written on the final image.
The purpose of a program is to instruct the GPU how to process our mesh, in order to obtain pixels.
```no_run
# let display: glium::Display = unsafe { std::mem::uninitialized() };
let program = glium::Program::from_source(&display,
// vertex shader
" #version 110
uniform mat4 matrix;
attribute vec2 position;
attribute vec3 color;
varying vec3 v_color;
void main() {
gl_Position = vec4(position, 0.0, 1.0) * matrix;
v_color = color;
}
",
// fragment shader
" #version 110
varying vec3 v_color;
void main() {
gl_FragColor = vec4(v_color, 1.0);
}
",
// optional geometry shader
None
).unwrap();
```
*Note: teaching you the GLSL language is not covered by this guide.*
You may notice that the `attribute` declarations in the vertex shader match the field names and
types of the elements in the vertex buffer. This is required, otherwise drawing will result in
an error.
In the example above, one of our shaders contains `uniform mat4 matrix;`. Uniforms are global
variables in our program whose values are chosen by the application.
```no_run
# #[macro_use]
# extern crate glium;
# fn main() {
let uniforms = uniform! {
matrix: [
[ 1.0, 0.0, 0.0, 0.0 ],
[ 0.0, 1.0, 0.0, 0.0 ],
[ 0.0, 0.0, 1.0, 0.0 ],
[ 0.0, 0.0, 0.0, 1.0 ]
]
};
# }
```
The value of uniforms can be of any type that implements `glium::uniforms::UniformValue`.
This includes textures and samplers (not covered here). See the `uniforms` module documentation
for more information.
Now that everything is initialized, we can finally draw something. The `display.draw()` function
will start drawing a new frame and return a `Frame` object. This `Frame` object has a `draw`
function, which you can use to draw things.
Its arguments are the source of vertices, source of indices, program, uniforms, and an object of
type `DrawParameters` which contains miscellaneous information specifying how everything should
be rendered (depth test, blending, backface culling, etc.).
```no_run
use glium::Surface;
# let display: glium::Display = unsafe { std::mem::uninitialized() };
# let vertex_buffer: glium::VertexBuffer<u8> = unsafe { std::mem::uninitialized() };
# let indices: glium::IndexBuffer = unsafe { std::mem::uninitialized() };
# let program: glium::Program = unsafe { std::mem::uninitialized() };
# let uniforms = glium::uniforms::EmptyUniforms;
let mut target = display.draw();
target.clear_color(0.0, 0.0, 0.0, 0.0); // filling the output with the black color
target.draw(&vertex_buffer, &indices, &program, &uniforms,
&std::default::Default::default()).unwrap();
target.finish();
```
*/
#![feature(slicing_syntax)]
#![feature(unboxed_closures)]
#![feature(unsafe_destructor)]
#![unstable]
#![allow(unstable)]
#![warn(missing_docs)]
// TODO: remove these when everything is implemented
#![allow(dead_code)]
#![allow(unused_variables)]
#[cfg(feature = "cgmath")]
extern crate cgmath;
extern crate glutin;
#[cfg(feature = "image")]
extern crate image;
extern crate libc;
#[cfg(feature = "nalgebra")]
extern crate nalgebra;
pub use context::{PollEventsIter, WaitEventsIter};
pub use index::IndexBuffer;
pub use vertex::{VertexBuffer, Vertex, VertexFormat};
pub use program::{Program, ProgramCreationError};
pub use program::ProgramCreationError::{CompilationError, LinkingError, ShaderTypeNotSupported};
pub use sync::{LinearSyncFence, SyncFence};
pub use texture::{Texture, Texture2d};
use std::collections::HashMap;
use std::ops::{Deref, DerefMut};
use std::sync::{Arc, Mutex};
use std::sync::mpsc::channel;
pub mod debug;
pub mod framebuffer;
pub mod index;
pub mod pixel_buffer;
pub mod macros;
pub mod program;
pub mod render_buffer;
pub mod uniforms;
pub mod vertex;
pub mod texture;
#[deprecated = "`index_buffer` has been renamed to `index`"]
#[allow(missing_docs)]
pub mod index_buffer {
pub use index::*;
}
mod buffer;
mod context;
mod fbo;
mod ops;
mod sampler_object;
mod sync;
mod vertex_array_object;
mod gl {
include!(concat!(env!("OUT_DIR"), "/gl_bindings.rs"));
}
/// Internal trait for objects that are OpenGL objects.
trait GlObject {
    /// The type of the OpenGL object's name (e.g. a `GLuint` or `GLhandleARB`).
    type Id;
    /// Returns the id of the object.
    fn get_id(&self) -> Self::Id;
}
/// Handle to a shader or a program.
///
/// Abstracts over the two naming schemes: plain `GLuint` object names and
/// `GLhandleARB` names from the ARB shader-objects extension.
// TODO: Handle(null()) is equal to Id(0)
#[derive(PartialEq, Eq, Copy, Clone, Debug, Hash)]
enum Handle {
    /// Name of a core OpenGL object.
    Id(gl::types::GLuint),
    /// Name of an object created through the ARB extension.
    Handle(gl::types::GLhandleARB),
}
// SAFETY(review): both variants wrap plain OpenGL name values with no
// thread-affine data, so sending a `Handle` to another thread looks safe —
// confirm `GLhandleARB` is never a real pointer on any supported platform.
unsafe impl Send for Handle {}
/// Internal trait for enums that can be turned into GLenum.
trait ToGlEnum {
    /// Returns the `GLenum` constant corresponding to this value.
    fn to_glenum(&self) -> gl::types::GLenum;
}
/// Function that the GPU will use for blending.
///
/// Blending happens at the end of the rendering process, when the GPU wants to write the
/// pixels over pixels that already exist in the framebuffer. The blending function allows
/// you to choose how it should merge the two.
///
/// If you want to add transparent objects one over another, the usual value
/// is `Addition { source: Alpha, destination: OneMinusAlpha }`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BlendingFunction {
    /// Simply overwrite the destination pixel with the source pixel.
    ///
    /// The alpha channels are simply ignored. This is the default mode.
    ///
    /// For example writing `(0.5, 0.9, 0.4, 0.2)` over `(0.9, 0.1, 0.4, 0.3)` will
    /// result in `(0.5, 0.9, 0.4, 0.2)`.
    AlwaysReplace,

    /// For each individual component (red, green, blue, and alpha), the minimum value is chosen
    /// between the source and the destination.
    ///
    /// For example writing `(0.5, 0.9, 0.4, 0.2)` over `(0.9, 0.1, 0.4, 0.3)` will
    /// result in `(0.5, 0.1, 0.4, 0.2)`.
    Min,

    /// For each individual component (red, green, blue, and alpha), the maximum value is chosen
    /// between the source and the destination.
    ///
    /// For example writing `(0.5, 0.9, 0.4, 0.2)` over `(0.9, 0.1, 0.4, 0.3)` will
    /// result in `(0.9, 0.9, 0.4, 0.3)`.
    Max,

    /// For each individual component (red, green, blue, and alpha), a weighted addition
    /// between the source and the destination.
    ///
    /// The result is equal to `source_component * source_factor + dest_component * dest_factor`,
    /// where `source_factor` and `dest_factor` are the values of `source` and `destination` of
    /// this enum.
    Addition {
        /// The factor to apply to the source pixel.
        source: LinearBlendingFactor,
        /// The factor to apply to the destination pixel.
        destination: LinearBlendingFactor,
    },

    /// For each individual component (red, green, blue, and alpha), a weighted subtraction
    /// of the source by the destination.
    ///
    /// The result is equal to `source_component * source_factor - dest_component * dest_factor`,
    /// where `source_factor` and `dest_factor` are the values of `source` and `destination` of
    /// this enum.
    Subtraction {
        /// The factor to apply to the source pixel.
        source: LinearBlendingFactor,
        /// The factor to apply to the destination pixel.
        destination: LinearBlendingFactor,
    },

    /// For each individual component (red, green, blue, and alpha), a weighted subtraction
    /// of the destination by the source.
    ///
    /// The result is equal to `-source_component * source_factor + dest_component * dest_factor`,
    /// where `source_factor` and `dest_factor` are the values of `source` and `destination` of
    /// this enum.
    ReverseSubtraction {
        /// The factor to apply to the source pixel.
        source: LinearBlendingFactor,
        /// The factor to apply to the destination pixel.
        destination: LinearBlendingFactor,
    },
}
/// Indicates which value to multiply each component with.
///
/// Each variant maps to one of the OpenGL blending-factor constants
/// (see the `ToGlEnum` implementation below).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LinearBlendingFactor {
    /// Multiply the source or destination component by zero, which always
    /// gives `0.0`.
    Zero,

    /// Multiply the source or destination component by one, which always
    /// gives you the original value.
    One,

    /// Multiply the source or destination component by its corresponding value
    /// in the source.
    ///
    /// If you apply this to the source components, you get the values squared.
    SourceColor,

    /// Equivalent to `1 - SourceColor`.
    OneMinusSourceColor,

    /// Multiply the source or destination component by its corresponding value
    /// in the destination.
    ///
    /// If you apply this to the destination components, you get the values squared.
    DestinationColor,

    /// Equivalent to `1 - DestinationColor`.
    OneMinusDestinationColor,

    /// Multiply the source or destination component by the alpha value of the source.
    SourceAlpha,

    /// Multiply the source or destination component by `1.0` minus the alpha value of the source.
    OneMinusSourceAlpha,

    /// Multiply the source or destination component by the alpha value of the destination.
    DestinationAlpha,

    /// Multiply the source or destination component by `1.0` minus the alpha value of the
    /// destination.
    OneMinusDestinationAlpha,
}
impl ToGlEnum for LinearBlendingFactor {
    /// Maps each blending factor to the matching OpenGL `GL_*` constant.
    fn to_glenum(&self) -> gl::types::GLenum {
        match self {
            &LinearBlendingFactor::Zero => gl::ZERO,
            &LinearBlendingFactor::One => gl::ONE,
            &LinearBlendingFactor::SourceColor => gl::SRC_COLOR,
            &LinearBlendingFactor::OneMinusSourceColor => gl::ONE_MINUS_SRC_COLOR,
            &LinearBlendingFactor::DestinationColor => gl::DST_COLOR,
            &LinearBlendingFactor::OneMinusDestinationColor => gl::ONE_MINUS_DST_COLOR,
            &LinearBlendingFactor::SourceAlpha => gl::SRC_ALPHA,
            &LinearBlendingFactor::OneMinusSourceAlpha => gl::ONE_MINUS_SRC_ALPHA,
            &LinearBlendingFactor::DestinationAlpha => gl::DST_ALPHA,
            &LinearBlendingFactor::OneMinusDestinationAlpha => gl::ONE_MINUS_DST_ALPHA,
        }
    }
}
/// Describes how triangles should be filtered before the fragment processing. Backface culling
/// is purely an optimization. If you don't know what this does, just use `CullingDisabled`.
///
/// # Backface culling
///
/// After the vertex shader stage, the GPU knows the 2D coordinates of each vertex of
/// each triangle.
///
/// For a given triangle, there are only two situations:
///
/// - The vertices are arranged in a clockwise direction on the screen.
/// - The vertices are arranged in a counterclockwise direction on the screen.
///
/// If you wish so, you can ask the GPU to discard all the primitives that belong to one
/// of these two categories.
///
/// ## Example
///
/// The vertices of this triangle are counter-clock-wise.
///
/// <svg width="556.84381" height="509.69049" version="1.1">
///  <g transform="translate(-95.156215,-320.37201)">
///   <path style="fill:none;stroke:#000000;stroke-width:4;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" d="M 324.25897,418.99654 539.42145,726.08292 212.13204,741.23521 z" />
///   <text style="font-size:40px;font-style:normal;font-weight:normal;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;font-family:Sans" x="296.98483" y="400.81378"><tspan x="296.98483" y="400.81378">1</tspan></text>
///   <text style="font-size:40px;font-style:normal;font-weight:normal;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;font-family:Sans" x="175.22902" y="774.8031"><tspan x="175.22902" y="774.8031">2</tspan></text>
///   <text style="font-size:40px;font-style:normal;font-weight:normal;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;font-family:Sans" x="555.58386" y="748.30627"><tspan x="555.58386" y="748.30627">3</tspan></text>
///  </g>
/// </svg>
///
/// # Usage
///
/// The trick is that if you make a 180° rotation of a shape, all triangles that were
/// clockwise become counterclockwise and vice versa.
///
/// Therefore you can arrange your model so that the triangles that are facing the screen
/// are all either clockwise or counterclockwise, and all the triangles that are *not* facing
/// the screen wind the other way.
///
/// By doing so you can use backface culling to discard all the triangles that are not
/// facing the screen, and increase your framerate.
///
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BackfaceCullingMode {
    /// All triangles are always drawn.
    CullingDisabled,

    /// Triangles whose vertices are counterclockwise won't be drawn.
    CullCounterClockWise,

    /// Triangles whose vertices are clockwise won't be drawn.
    CullClockWise
}
/// The function that the GPU will use to determine whether to write over an existing pixel
/// on the target.
///
/// # Depth buffers
///
/// After the fragment shader has been run, the GPU maps the output Z coordinates to the depth
/// range (which you can specify in the draw parameters) in order to obtain the depth value
/// in window coordinates. This depth value is always between `0.0` and `1.0`.
///
/// In addition to the buffer where pixel colors are stored, you can also have a buffer
/// which contains the depth value of each pixel. Whenever the GPU tries to write a pixel,
/// it will first compare the depth value of the pixel to be written with the depth value that
/// is stored at this location.
///
/// If you don't have a depth buffer available, you can only pass `Overwrite`. Glium detects if
/// you pass any other value and reports an error.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DepthFunction {
    /// Never replace the target pixel.
    ///
    /// This option doesn't really make sense, but is here for completeness.
    Ignore,

    /// Always replace the target pixel.
    ///
    /// This is the default mode.
    Overwrite,

    /// Replace if the z-value of the source is equal to the destination.
    IfEqual,

    /// Replace if the z-value of the source is different than the destination.
    IfNotEqual,

    /// Replace if the z-value of the source is more than the destination.
    IfMore,

    /// Replace if the z-value of the source is more than, or equal to the destination.
    IfMoreOrEqual,

    /// Replace if the z-value of the source is less than the destination.
    IfLess,

    /// Replace if the z-value of the source is less than, or equal to the destination.
    IfLessOrEqual
}
impl DepthFunction {
    /// Returns true if the function requires a depth buffer to be used.
    ///
    /// `Overwrite` is the only mode that never consults the stored depth
    /// value, so it is the only one usable without a depth buffer.
    pub fn requires_depth_buffer(&self) -> bool {
        match *self {
            DepthFunction::Overwrite => false,
            DepthFunction::Ignore |
            DepthFunction::IfEqual |
            DepthFunction::IfNotEqual |
            DepthFunction::IfMore |
            DepthFunction::IfMoreOrEqual |
            DepthFunction::IfLess |
            DepthFunction::IfLessOrEqual => true,
        }
    }
}
impl ToGlEnum for DepthFunction {
    /// Maps each depth function to the matching OpenGL comparison constant.
    fn to_glenum(&self) -> gl::types::GLenum {
        match self {
            &DepthFunction::Ignore => gl::NEVER,
            &DepthFunction::Overwrite => gl::ALWAYS,
            &DepthFunction::IfEqual => gl::EQUAL,
            &DepthFunction::IfNotEqual => gl::NOTEQUAL,
            &DepthFunction::IfMore => gl::GREATER,
            &DepthFunction::IfMoreOrEqual => gl::GEQUAL,
            &DepthFunction::IfLess => gl::LESS,
            &DepthFunction::IfLessOrEqual => gl::LEQUAL,
        }
    }
}
/// Defines how the device should render polygons.
///
/// The usual value is `Fill`, which fills the content of polygon with the color. However other
/// values are sometimes useful, especially for debugging purposes.
///
/// Selected through the `polygon_mode` member of `DrawParameters`.
///
/// # Example
///
/// The same triangle drawn respectively with `Fill`, `Line` and `Point` (barely visible).
///
/// <svg width="890.26135" height="282.59375" version="1.1">
///  <g transform="translate(0,-769.9375)">
///     <path style="fill:#ff0000;fill-opacity:1;stroke:none" d="M 124.24877,771.03979 258.59906,1051.8622 0,1003.3749 z" />
///     <path style="fill:none;fill-opacity:1;stroke:#ff0000;stroke-opacity:1" d="M 444.46713,771.03979 578.81742,1051.8622 320.21836,1003.3749 z" />
///     <path style="fill:#ff0000;fill-opacity:1;stroke:none" d="m 814.91074,385.7662 c 0,0.0185 -0.015,0.0335 -0.0335,0.0335 -0.0185,0 -0.0335,-0.015 -0.0335,-0.0335 0,-0.0185 0.015,-0.0335 0.0335,-0.0335 0.0185,0 0.0335,0.015 0.0335,0.0335 z" transform="matrix(18.833333,0,0,18.833333,-14715.306,-6262.0056)" />
///     <path style="fill:#ff0000;fill-opacity:1;stroke:none" d="m 814.91074,385.7662 c 0,0.0185 -0.015,0.0335 -0.0335,0.0335 -0.0185,0 -0.0335,-0.015 -0.0335,-0.0335 0,-0.0185 0.015,-0.0335 0.0335,-0.0335 0.0185,0 0.0335,0.015 0.0335,0.0335 z" transform="matrix(18.833333,0,0,18.833333,-14591.26,-6493.994)" />
///     <path style="fill:#ff0000;fill-opacity:1;stroke:none" d="m 814.91074,385.7662 c 0,0.0185 -0.015,0.0335 -0.0335,0.0335 -0.0185,0 -0.0335,-0.015 -0.0335,-0.0335 0,-0.0185 0.015,-0.0335 0.0335,-0.0335 0.0185,0 0.0335,0.015 0.0335,0.0335 z" transform="matrix(18.833333,0,0,18.833333,-14457.224,-6213.6135)" />
///  </g>
/// </svg>
///
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PolygonMode {
    /// Only draw a single point at each vertex.
    ///
    /// All attributes that apply to points are used when using this mode.
    Point,

    /// Only draw a line in the boundaries of each polygon.
    ///
    /// All attributes that apply to lines (`line_width`) are used when using this mode.
    Line,

    /// Fill the content of the polygon. This is the default mode.
    Fill,
}
impl ToGlEnum for PolygonMode {
    /// Maps each polygon mode to the matching OpenGL constant.
    fn to_glenum(&self) -> gl::types::GLenum {
        match self {
            &PolygonMode::Point => gl::POINT,
            &PolygonMode::Line => gl::LINE,
            &PolygonMode::Fill => gl::FILL,
        }
    }
}
/// Represents the parameters to use when drawing.
///
/// Example:
///
/// ```
/// let params = glium::DrawParameters {
///     depth_function: glium::DepthFunction::IfLess,
///     .. std::default::Default::default()
/// };
/// ```
///
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct DrawParameters {
    /// The function that the GPU will use to determine whether to write over an existing pixel
    /// on the target.
    ///
    /// See the `DepthFunction` documentation for more details.
    ///
    /// The default is `Overwrite`.
    pub depth_function: DepthFunction,

    /// The range of possible Z values in surface coordinates.
    ///
    /// Just like OpenGL turns X and Y coordinates between `-1.0` and `1.0` into surface
    /// coordinates, it will also map your Z coordinates to a certain range which you can
    /// specify here.
    ///
    /// The two values must be between `0.0` and `1.0`, anything outside this range will result
    /// in an error when drawing. By default the depth range is `(0.0, 1.0)`.
    ///
    /// The first value of the tuple must be the "near" value, where `-1.0` will be mapped.
    /// The second value must be the "far" value, where `1.0` will be mapped.
    /// It is possible for the "near" value to be greater than the "far" value.
    pub depth_range: (f32, f32),

    /// The function that the GPU will use to merge the existing pixel with the pixel that is
    /// being written.
    ///
    /// `None` means "don't care" (usually when you know that the alpha is always 1).
    pub blending_function: Option<BlendingFunction>,

    /// Width in pixels of the lines to draw when drawing lines.
    ///
    /// `None` means "don't care". Use this when you don't draw lines.
    pub line_width: Option<f32>,

    /// Whether or not the GPU should filter out some faces.
    ///
    /// After the vertex shader stage, the GPU will try to remove the faces that aren't facing
    /// the camera.
    ///
    /// See the `BackfaceCullingMode` documentation for more infos.
    pub backface_culling: BackfaceCullingMode,

    /// How to render polygons. The default value is `Fill`.
    ///
    /// See the documentation of `PolygonMode` for more infos.
    pub polygon_mode: PolygonMode,

    /// Whether multisample antialiasing (MSAA) should be used. Default value is `true`.
    ///
    /// Note that you will need to set the appropriate option when creating the window.
    /// The recommended way to do this is to leave it to `true`, and adjust the option when
    /// creating the window.
    pub multisampling: bool,

    /// Whether dithering is activated. Default value is `true`.
    ///
    /// Dithering will smoothen the transition between colors in your color buffer.
    pub dithering: bool,

    /// The viewport to use when drawing.
    ///
    /// The X and Y positions of your vertices are mapped to the viewport so that `(-1, -1)`
    /// corresponds to the lower-left hand corner and `(1, 1)` corresponds to the top-right
    /// hand corner. Any pixel outside of the viewport is discarded.
    ///
    /// You can specify a viewport greater than the target if you want to stretch the image.
    ///
    /// `None` means "use the whole surface".
    pub viewport: Option<Rect>,

    /// If specified, only pixels in this rect will be displayed. Default is `None`.
    ///
    /// This is different from a viewport. The image will stretch to fill the viewport, but
    /// not the scissor box.
    pub scissor: Option<Rect>,
}
impl std::default::Default for DrawParameters {
    /// Returns the default draw parameters: no depth test (`Overwrite`),
    /// full `(0.0, 1.0)` depth range, blending disabled (`AlwaysReplace`),
    /// no culling, filled polygons, multisampling and dithering on, and
    /// the whole surface used as viewport.
    fn default() -> DrawParameters {
        DrawParameters {
            depth_function: DepthFunction::Overwrite,
            depth_range: (0.0, 1.0),
            blending_function: Some(BlendingFunction::AlwaysReplace),
            line_width: None,
            backface_culling: BackfaceCullingMode::CullingDisabled,
            polygon_mode: PolygonMode::Fill,
            multisampling: true,
            dithering: true,
            viewport: None,
            scissor: None,
        }
    }
}
impl DrawParameters {
    /// Checks the parameters and returns an error if something is wrong.
    ///
    /// Note: despite what the original comment claimed, this does not
    /// panic; invalid values are reported through `DrawError`. Currently
    /// only the depth range is checked: both bounds must be in `[0.0, 1.0]`.
    fn validate(&self) -> Result<(), DrawError> {
        if self.depth_range.0 < 0.0 || self.depth_range.0 > 1.0 ||
            self.depth_range.1 < 0.0 || self.depth_range.1 > 1.0
        {
            return Err(DrawError::InvalidDepthRange);
        }

        Ok(())
    }

    /// Synchronizes the parameters with the current ctxt.state.
    ///
    /// Each piece of GL state is only touched when the cached value in
    /// `ctxt.state` differs from the requested one, avoiding redundant
    /// driver calls. `surface_dimensions` provides the viewport size used
    /// when `self.viewport` is `None`.
    fn sync(&self, ctxt: &mut context::CommandContext, surface_dimensions: (u32, u32)) {
        // depth function
        match self.depth_function {
            // `Overwrite` means no depth test at all, so simply disable it.
            DepthFunction::Overwrite => unsafe {
                if ctxt.state.enabled_depth_test {
                    ctxt.gl.Disable(gl::DEPTH_TEST);
                    ctxt.state.enabled_depth_test = false;
                }
            },
            depth_function => unsafe {
                let depth_function = depth_function.to_glenum();

                if ctxt.state.depth_func != depth_function {
                    ctxt.gl.DepthFunc(depth_function);
                    ctxt.state.depth_func = depth_function;
                }

                if !ctxt.state.enabled_depth_test {
                    ctxt.gl.Enable(gl::DEPTH_TEST);
                    ctxt.state.enabled_depth_test = true;
                }
            }
        }

        // depth range
        if self.depth_range != ctxt.state.depth_range {
            unsafe {
                ctxt.gl.DepthRange(self.depth_range.0 as f64, self.depth_range.1 as f64);
            }
            ctxt.state.depth_range = self.depth_range;
        }

        // blending function
        // Sets the blend equation for each mode; the weighted modes return
        // their (source, destination) factors, which are applied to
        // glBlendFunc in a single place below.
        let blend_factors = match self.blending_function {
            Some(BlendingFunction::AlwaysReplace) => unsafe {
                if ctxt.state.enabled_blend {
                    ctxt.gl.Disable(gl::BLEND);
                    ctxt.state.enabled_blend = false;
                }
                None
            },
            Some(BlendingFunction::Min) => unsafe {
                if ctxt.state.blend_equation != gl::MIN {
                    ctxt.gl.BlendEquation(gl::MIN);
                    ctxt.state.blend_equation = gl::MIN;
                }
                if !ctxt.state.enabled_blend {
                    ctxt.gl.Enable(gl::BLEND);
                    ctxt.state.enabled_blend = true;
                }
                None
            },
            Some(BlendingFunction::Max) => unsafe {
                if ctxt.state.blend_equation != gl::MAX {
                    ctxt.gl.BlendEquation(gl::MAX);
                    ctxt.state.blend_equation = gl::MAX;
                }
                if !ctxt.state.enabled_blend {
                    ctxt.gl.Enable(gl::BLEND);
                    ctxt.state.enabled_blend = true;
                }
                None
            },
            Some(BlendingFunction::Addition { source, destination }) => unsafe {
                if ctxt.state.blend_equation != gl::FUNC_ADD {
                    ctxt.gl.BlendEquation(gl::FUNC_ADD);
                    ctxt.state.blend_equation = gl::FUNC_ADD;
                }
                if !ctxt.state.enabled_blend {
                    ctxt.gl.Enable(gl::BLEND);
                    ctxt.state.enabled_blend = true;
                }
                Some((source, destination))
            },
            Some(BlendingFunction::Subtraction { source, destination }) => unsafe {
                if ctxt.state.blend_equation != gl::FUNC_SUBTRACT {
                    ctxt.gl.BlendEquation(gl::FUNC_SUBTRACT);
                    ctxt.state.blend_equation = gl::FUNC_SUBTRACT;
                }
                if !ctxt.state.enabled_blend {
                    ctxt.gl.Enable(gl::BLEND);
                    ctxt.state.enabled_blend = true;
                }
                Some((source, destination))
            },
            Some(BlendingFunction::ReverseSubtraction { source, destination }) => unsafe {
                if ctxt.state.blend_equation != gl::FUNC_REVERSE_SUBTRACT {
                    ctxt.gl.BlendEquation(gl::FUNC_REVERSE_SUBTRACT);
                    ctxt.state.blend_equation = gl::FUNC_REVERSE_SUBTRACT;
                }
                if !ctxt.state.enabled_blend {
                    ctxt.gl.Enable(gl::BLEND);
                    ctxt.state.enabled_blend = true;
                }
                Some((source, destination))
            },
            _ => None
        };

        if let Some((source, destination)) = blend_factors {
            let source = source.to_glenum();
            let destination = destination.to_glenum();

            if ctxt.state.blend_func != (source, destination) {
                unsafe { ctxt.gl.BlendFunc(source, destination) };
                ctxt.state.blend_func = (source, destination);
            }
        };

        // line width
        if let Some(line_width) = self.line_width {
            if ctxt.state.line_width != line_width {
                unsafe {
                    ctxt.gl.LineWidth(line_width);
                    ctxt.state.line_width = line_width;
                }
            }
        }

        // back-face culling
        // note: we never change the value of `glFrontFace`, whose default is GL_CCW
        // that's why `CullClockWise` uses `GL_BACK` for example
        match self.backface_culling {
            BackfaceCullingMode::CullingDisabled => unsafe {
                if ctxt.state.enabled_cull_face {
                    ctxt.gl.Disable(gl::CULL_FACE);
                    ctxt.state.enabled_cull_face = false;
                }
            },
            BackfaceCullingMode::CullCounterClockWise => unsafe {
                if !ctxt.state.enabled_cull_face {
                    ctxt.gl.Enable(gl::CULL_FACE);
                    ctxt.state.enabled_cull_face = true;
                }
                if ctxt.state.cull_face != gl::FRONT {
                    ctxt.gl.CullFace(gl::FRONT);
                    ctxt.state.cull_face = gl::FRONT;
                }
            },
            BackfaceCullingMode::CullClockWise => unsafe {
                if !ctxt.state.enabled_cull_face {
                    ctxt.gl.Enable(gl::CULL_FACE);
                    ctxt.state.enabled_cull_face = true;
                }
                if ctxt.state.cull_face != gl::BACK {
                    ctxt.gl.CullFace(gl::BACK);
                    ctxt.state.cull_face = gl::BACK;
                }
            },
        }

        // polygon mode
        unsafe {
            let polygon_mode = self.polygon_mode.to_glenum();
            if ctxt.state.polygon_mode != polygon_mode {
                ctxt.gl.PolygonMode(gl::FRONT_AND_BACK, polygon_mode);
                ctxt.state.polygon_mode = polygon_mode;
            }
        }

        // multisampling
        if ctxt.state.enabled_multisample != self.multisampling {
            unsafe {
                if self.multisampling {
                    ctxt.gl.Enable(gl::MULTISAMPLE);
                    ctxt.state.enabled_multisample = true;
                } else {
                    ctxt.gl.Disable(gl::MULTISAMPLE);
                    ctxt.state.enabled_multisample = false;
                }
            }
        }

        // dithering
        if ctxt.state.enabled_dither != self.dithering {
            unsafe {
                if self.dithering {
                    ctxt.gl.Enable(gl::DITHER);
                    ctxt.state.enabled_dither = true;
                } else {
                    ctxt.gl.Disable(gl::DITHER);
                    ctxt.state.enabled_dither = false;
                }
            }
        }

        // viewport
        // Falls back to the whole surface when no explicit viewport is set;
        // either way the dimensions are checked against the GL maximum.
        if let Some(viewport) = self.viewport {
            assert!(viewport.width <= ctxt.capabilities.max_viewport_dims.0 as u32,
                    "Viewport dimensions are too large");
            assert!(viewport.height <= ctxt.capabilities.max_viewport_dims.1 as u32,
                    "Viewport dimensions are too large");

            let viewport = (viewport.left as gl::types::GLint, viewport.bottom as gl::types::GLint,
                            viewport.width as gl::types::GLsizei,
                            viewport.height as gl::types::GLsizei);

            if ctxt.state.viewport != viewport {
                unsafe { ctxt.gl.Viewport(viewport.0, viewport.1, viewport.2, viewport.3); }
                ctxt.state.viewport = viewport;
            }

        } else {
            assert!(surface_dimensions.0 <= ctxt.capabilities.max_viewport_dims.0 as u32,
                    "Viewport dimensions are too large");
            assert!(surface_dimensions.1 <= ctxt.capabilities.max_viewport_dims.1 as u32,
                    "Viewport dimensions are too large");

            let viewport = (0, 0, surface_dimensions.0 as gl::types::GLsizei,
                            surface_dimensions.1 as gl::types::GLsizei);

            if ctxt.state.viewport != viewport {
                unsafe { ctxt.gl.Viewport(viewport.0, viewport.1, viewport.2, viewport.3); }
                ctxt.state.viewport = viewport;
            }
        }

        // scissor
        if let Some(scissor) = self.scissor {
            let scissor = (scissor.left as gl::types::GLint, scissor.bottom as gl::types::GLint,
                           scissor.width as gl::types::GLsizei,
                           scissor.height as gl::types::GLsizei);

            unsafe {
                if ctxt.state.scissor != scissor {
                    ctxt.gl.Scissor(scissor.0, scissor.1, scissor.2, scissor.3);
                    ctxt.state.scissor = scissor;
                }

                if !ctxt.state.enabled_scissor_test {
                    ctxt.gl.Enable(gl::SCISSOR_TEST);
                    ctxt.state.enabled_scissor_test = true;
                }
            }
        } else {
            unsafe {
                if ctxt.state.enabled_scissor_test {
                    ctxt.gl.Disable(gl::SCISSOR_TEST);
                    ctxt.state.enabled_scissor_test = false;
                }
            }
        }
    }
}
/// Area of a surface in pixels.
///
/// In the OpenGL ecosystem, the (0,0) coordinate is at the bottom-left hand corner of the images.
///
/// Used for both the `viewport` and `scissor` members of `DrawParameters`.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub struct Rect {
    /// Number of pixels between the left border of the surface and the left border of
    /// the rectangle.
    pub left: u32,
    /// Number of pixels between the bottom border of the surface and the bottom border
    /// of the rectangle.
    pub bottom: u32,
    /// Width of the area in pixels.
    pub width: u32,
    /// Height of the area in pixels.
    pub height: u32,
}
/// Object that can be drawn upon.
///
/// # What does the GPU do when you draw?
///
/// This is a summary of everything that happens when you call the `draw` function. Note that
/// this is not necessarily *exactly* what happens. Backends are free to do whatever they want
/// as long as it always matches the expected outcome.
///
/// ## Step 1: Vertex shader
///
/// For each vertex in the vertices source, the GPU invokes the vertex shader that is part
/// of the program, and passes the corresponding vertex's attributes to it.
///
/// The vertex shader *must* write the special `gl_Position` variable in order to indicate
/// the four-dimensions coordinates of the vertex. In order to understand what these coordinates
/// mean, see the "vertex post-processing" step below.
///
/// In addition to the position of the vertex, the vertex shader can also specify the values of
/// various vertex attributes.
///
/// ## Step 2: Tessellation (optional)
///
/// It is possible to use tessellation shaders, but glium does not support them yet.
///
/// ## Step 3: Geometry shader (optional)
///
/// If you specify a geometry shader, then the GPU will invoke it once for each primitive.
///
/// The geometry shader can output multiple primitives.
///
/// ## Step 4: Transform feedback (optional)
///
/// Transform feedback is not supported by glium for the moment.
///
/// ## Step 5: Vertex post-processing
///
/// The vertex shader step told the GPU what the coordinates of each vertex are, but these
/// coordinates have four dimensions, named `x`, `y`, `z` and `w`.
///
/// The GPU then computes the position of the vertex on the 2D surface you are drawing on, and
/// the depth of this vertex:
///
/// ```notrust
/// window_x = viewport_left + viewport_width * ((x / w) + 1.0) / 2.0
/// window_y = viewport_bottom + viewport_height * ((y / w) + 1.0) / 2.0
/// depth = depth_near + (depth_far - depth_near) * ((z / w) + 1.0) / 2.0
/// ```
///
/// *`viewport_left`, `viewport_width`, `viewport_bottom` and `viewport_height` correspond to
/// the `viewport` member of the draw parameters, and `depth_near` and `depth_far` correspond
/// to the `depth_range` member*.
///
/// This means that if `x / w`, `y / w` or `z / w` are equal to `-1.0`, then the result will be
/// `viewport_left`, `viewport_bottom` or `depth_near`. If they are equal to `1.0`, the result
/// will be `viewport_left + viewport_width` (the right of the viewport),
/// `viewport_bottom + viewport_height` (the top of the viewport) or `depth_far`.
///
/// For example if you want to draw a rectangle that covers the whole screen, it should be made
/// of four vertices whose coordinates are `(-1.0, -1.0, 0.0, 1.0)` (bottom-left corner),
/// `(-1.0, 1.0, 0.0, 1.0)` (top-left corner), `(1.0, 1.0, 0.0, 1.0)` (top-right corner) and
/// `(1.0, -1.0, 0.0, 1.0)` (bottom-right corner).
///
/// ## Step 6: Primitive assembly
///
/// The next step consists in building the primitives. Triangle strips, triangle fans and line
/// strips are turned into individual triangles or lines.
///
/// Triangle strips obey certain rules for the order of indices. For example the triangle strip
/// `0, 1, 2, 3, 4, 5` does *not* correspond to `0, 1, 2`, `1, 2, 3`, `2, 3, 4`, `3, 4, 5` as you
/// would expect, but to `0, 1, 2`, `1, 3, 2`, `2, 3, 4`, `3, 5, 4` (some indices are reversed).
/// This is important with regards to the face culling step below.
///
/// Then, if you did specify `PrimitiveMode`, it is used. If you specified `Line`, triangles are
/// turned into lines. If you specified `Point`, triangles and lines are turned into points.
///
/// The GPU then looks at the screen coordinates of each primitive, and discards primitives that
/// are entirely outside of the window.
///
/// Note that points whose centers are outside of the viewport are discarded, even if the point
/// width would be big enough for the point to be visible. However this standard behavior is not
/// respected by nVidia drivers, which show the points anyway.
///
/// ## Step 7: Face culling (triangles only)
///
/// This step is purely an optimization step and only concerns triangles.
///
/// If you specify a value for `backface_culling` other than `CullingDisabled`, the GPU will
/// discard triangles depending on the way that the vertices are arranged on the window. You can
/// either discard triangles whose vertices are clockwise or counterclockwise.
///
/// For more information, see the `BackfaceCullingMode` documentation.
///
/// ## Step 8: Rasterization
///
/// Now that the GPU knows where on the window the various triangles, points or lines are, it will
/// determine which pixels of the surface are part of each primitive.
///
/// For points and lines, this step depends on the points width and line width that you specified
/// in the draw parameters.
///
/// <img alt="" src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAnYAAAEsCAYAAABOqf71AAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAAxOAAAMTgF/d4wjAAAAB3RJTUUH3wEIDBAooQVGygAAG4JJREFUeNrt3XuQXGWZx/HvmftMLjOZJJCLuZAEuYgJlIBKFBGQoNwCtQvlemPRVVaLhRUtlHG31KUVd7HEUgvwDy9LqaB7UVxL3dJV8a6sV5DLCkmASAwJyYRkMvfeP96ezJlmksxkumfOe/r7qUqR0wmZp5/z9ulfn/f0e0CSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEnSDEq64BLgFmBNhPUXgcTdqByKdWxvTuBvb4JvuQsP70Z4VQK3Aic6RiXH9hRtBa6tAz4RaajDg4ry/KEr0rpXFuEmd9+Ed/JHIw11Hn/l8Td7lgIfqQOWuQ8lVdAaWzBhy22BpApa3VD2wH0JfDeCwk8twjmp7Z8l8IOsF12E15cS9chHgluBvgjqvh4YGSt9pbozbRgWJnBVqtebgC9HUPeGBE5OPXRPAg9GMEY2MLZuTd79CXwjgn19AnBx6qHfJfDNCOq+HDhmZLsPvliEnVmvuwXeCjSXNgdLZ3mzrrkI16W2/5TAnRGMkVcAL0m9b3wXuC+2uhvK/vDHBXhP1p9EVxgw6WD3/ZvgvVmv+0Z4WZIKdg3wgffD7gj6fe3IWClCbwxj5EZYSyrYFeGRSOpuTwekInyxAHdHUHdHYrCbqv+9KY4xekWSCnZF+GUk7xsnp4PdZvj0v8EfIuj3m5JUsIthjLwfOgbGBrsnbopjjHw4HZCA/7opghMZ74Obi6m66zyWSpIk5YPBTpIkyWAnSZIkg50kSZIMdpIkSTLYSZIkGewkSZJksJMkSZLBTpIkSQY7SZIkg50kSZIMdpIkSTLYSZIkyWAnSZIkg50kSZLBTpIkSQY7SZIkGewkSZJksJMkSTLYSZIkyWAnSZIkg50kSZIMdpIkSQY7SZIkGewkSZJksJMkSZLBTpIkSQY7SZIkg50kSZIMdpIkSTLYCWgFGm2DJEky2MXveOBioNlWSJIkg13c1gBHAxuBNtshSZIMdnGaCyws/X4+cCkwx7ZIkiSDXXxWl223A5cB82yNJEky2MUd7ABmEc7cHWV7JEmSwS4Ocw4R3lqAS4AltkmSJBnssm/1Yf68EbgIWGmrJEmSwS7uYAdQD5wPHGu7JEky2CmbZhOWOJnofjwXOMm2SZJUu5IuKKa2HwB+FkHdJwEvTm3/Frgv60UX4cJkbFi7E+gf7+9ugs5NEw92B6yAp1fDjgqXfiXhzCCleu+MoNedSfiCyYitwLciqPuMBE5IPfQ94LEIXpPrCQtpA3QXoMPD6+F1wW7CN91J4P+KcG8EZa8CXpnafhj4UQR1nw8sZfRA9g1C/zOtCS6ndOehIgwl8LkIet0EvCG1/TRwTwR1nwqsS23/HLg/grpPA9YeLNgpIx4C9h3h/7soffSSpl9fIXy5R4cPdr14RxlJFVSHwS5z+qcQ6gC2AVtso2ZOgy2wV5JmRLEOSOxDtlRibmAHYf7O1K4Z+mwieyVp+iXlnxa/CXw+gsJfA7wxtf014EsR1P0B4LgDsRrenIxzcu7XcPreClyjtDmEux2vgN80wvAU/ql/JVwzAdADXBVBr1cAH0lt/xb4cAR1vxl4VWr748BPI6j7LYQv8BhWJh/sWku//wFwWwQ1vxS4NvUEftQdjhGZ1gnX1sMLRrZ/AV/pgZ1Zr/tMuKqudPwdhoGn4Lqs19wArUfDLamH/gi8L4Kx/VrC+rAjPl/KRVHVXR7sHinA3Vl/Bl2wuCzYPRhJ3dekg10C/1F47gm6WYR7wlbMz8N1PN840jfcrrEX6w5E0uu1ZcFuWyR1n1UW7H4aSd2vTAU7HeFnsUj2NelgN
wRbPg1fzXrdN4Q3vwPB7lG4/zF4Iut1vxzelNoc/hx8Pes1r4W5F40NdjsjGdsnlwW730RS9ynpul3uJHtWV+HfXAxsTJ0ZkCRJOWSwq41gB7CAsPzHbFssSZLBTtXXRlitpFo6gMsorZslSZIMdqqe1VT/W8qzS+Fuge2WJMlgp+oGu+nQSrjQcpEtlyTJYKfKayN8yWG6NAMXA8ttvSRJBjtV1iqmf7HoBsKagKttvyRJBjtVzuoZHAPnMfbG85IkyWCnI9QKLJnBn58QFphd566QJMlgp6mZiWnY8awHTnd3SJJksNORW5OhWk4FXu4ukSTJYKfJm+lp2PG8kHDfT8eHJEkGO03CMWRjGrbc84HzgXp3kSRJBjtNzJoM17YSuHDYfSRJksFOh/YAtJC9adhySx+G+kF3lyRJBjsd3K9gRQz7YB/wMDDgLpMkyWCn8e0J19dFobcU7vqyeT2gJEky2M2cQaA/+9OwY/QBD0Ij0OkelCTJYKeS3eE/0Z39Kk3HXgoc5V6UJMlgJ2BX3OU3A5cAS92TkiQZ7GraEPBs/E+jEbiQiK4TlCTJYKeK2w0U8/FU6oENwHHuVUmSDHY1aVf+xtA5hNuQSZIkg13tGAL25POpvRw41T0sSZLBrmbkaBp2PKcD693LkiQZ7GrCrvw/xXXAK3EhY0mSDHZ5NgjJntp4qicA5zm+JEky2OXWTmgp1s7TXQ1cADS45yVJMtjlzg5oqbGnvAy4mLCgsSRJMtjlRlN3bQacRYS7VLQ5BCRJMtjlxcpi7X6hYAHh/rJzHAaSJBns8mB1jT//9lK4m+dQkCTJYBezRmC5bWA2sBFYaCskSTLYxWol4b6qglbCNXeLbYUkSQa7GK22BWM0ARcBK2yFJEkGu5g4DTu+BuDVwBpbIUmSwS4WK3CR3kONv1cBJ9oKSZIMdjFwGvbQEuAs4BRbIUmSwS7LGvE6sol6KfAS2yBJ0pErnyJ8Xlc4e5J15ddlLc9i3b+BpQ/ACakUPeaWYktg9SD0RdDvJPWb+pVwbJV+zrEL4cXnwu/qYKq31V1Vtt0ZydheUrZ9YqR1a/IWRbKvx1w6UQ9HvxnWR3AQ6yh7E1m5CGZFUHdd+vevDx+CM63xuX2dG8nYLr8efk2MdSddU38D1UE8BuyyDZPWSVgfJrEVseothGVtdBhdsJ/au4e0pCpyKrZKhoFu23BEngEeLfVQUWqyBfZKksEuV7oNJlPu3x+BIVsRI2cBJvcZUJIqpvwauz2EEyZZN5cwY5fOAZma9dwBrYNl/a2H9iT12GDodTGCQdLJ6MxocXCaxsgu4A8wfDz0NE6+T02Mve5rP/DnCMb2fGBOavtpYF9kde/30DphvYRb7QHsDYeOzJtF6raAPdDTC7sjeNOY3wDNI9uPQvcQDGa97jXQWVc6/g5A8ckIxkgdJCtgwch2H/T3hWNZprXA3Kaxx99nSrko6+YR7sc+brD7bAGuy/oz6Ao1fiz10G0FeG/GAvNfE74Ve8BZcH1L6qL+n8CHeiJ4E9wAtyal5zIMvd+Bf5zOn39PyHj3TCbgdMFa4Leph+4twPkRjO3bgKtTD11TgLsjqPt24G2lTU+0Tly6V/9egCsj2NdXAHelgt2374APZr3u6+FTDXDGyPZP4VNbYGvW634P3ELpmtVhGPwivDOCT3ltV8MdqZMYD34CLs963dfA9U3w1tRD/1SAWyN4Td4M3JAK1qqC5eWhTlP+NHJZ+hOJJEl6LoNddbgoceXNAS4NHwYlSZLBbnrUE1brUOW1ARuBRbZCkiSD3XRwGra6moGLgGW2QpIkg121rbEFVdcIXMBz7y4hSZLBThXjNOz0jt0NwPG2QpIkg101LMNp2OmUAGcD62yFJEkGu0pzGnZmrAdOsw2SJIOdKsVp2Jl1GvAy2yBJMtipEpbhDb1n2lrgHEZvfyZJUk1psAUV46LE2XAc0NgH25rthSSpxnjGrnJ9XGkbMmPVV+HMY
fsgSTLY6QgsIyycq4zYB0c/gnejlyQZ7DR5TsNmM9zxMDBgKyRJBjtNoofH2IZs2l8Kd/u9nlSSZLDTBDwPp2EzrQ/4HRwFzLMbkiSDnQ7FadgI9IczdpcSAp4kSQY7jds/b0QfjxbgEmCprZAkGexUzmnY+DQCF+LyNJIkg53KeLYuTvXA+cDzbYUkyWCnkd4Z7OLef+cCJ9kKSZLBTksJ12wpbmcCL7INkiSDXW3z27D58WLgDNsgSTLY1aYEp2Hz5mTgrNK+lSTJYFdDnIbNpxOBV/m6kCQZ7GqL07D5tQZ4Dd6CTJJksKsJTsPm33LgIqDJVkiSDHb5tgRotQ25txjY6L6WJBns8s1p2NqxgHB/2dm2QpJksMsfp2FrTwdwWem/kiQZ7HJkMdBmG2rObMKZuwW2QpJksMuPNbagZrUSrrlbbCskSQa7+DkNqybCt2WX2wpJksEubotwGlZhfbvX4JdoJEkGu6g5Dav06+Y8wp0qJEky2EXIaVilJYR7y55sKyRJBru4LAZm2QaN4wzgxbZBkmSwi4fXU+lQXgScaRskSQY7g53y4STgXF9TkiSDXbYtwmlYTczzgfOBelshSZoJSRcUU9vdwI4I6m5n7F0AdgM7q/GDNkPLNmiuxL/VAO0JNI5sD8LO4tj+Z1IjzCd8WQCgOFClXld0YEN9A8wb2S5C/yDsmY6fPRcGj4Oe+iPbtwvDP3HAn4G9Ebwm03XvKYTXqA6jKxxzR/r2LLA9grJnlT7wAjAM+4bhmawXXQ8LE2gZ2e6DXUUYzHrdLeG9Likdx4r9cYyRpBmOSh9/h2Fb1ouug45k7PF3RykXZd18Ure9LA92KvN7oN82aJLagGNDmK9F/YUKfRiqgWDXT+rDniRVIKDqYPYZ6nSEeoCHgYHafPpORU9cYgskVdJ4JxRiOYOXVLvuXRHWHGuv81h3L/AQ4cK75iOvO8Ze93ponbC+suNwdPu7GH5lvu66suPBUCT9rn9u3VGMkfqyMTIcyRgZ5+BbjODFmBwq2H28ANdl/Ul0hRo/lnro5gK8two/6g3AnEr9Y2fB9S2phY7vhXf3wP6s93sD3DpybeAw7P9veFfWa14MS9fBjSPbPfDgvfDJGSqnB7iHCVyH1AW3AVenHnptAe6O4DV5O/C20mbmr1vKkHSvPl+AKyPY11cAd41sb4HvfiGM20y7Bt43N7Wg+JfgQ1tga9brfg/cUg+tpU9MAx+N4D16HrS+HW4Z2d4Gj34G3p/1ut8Aly8P9wMfqfvDn4HPRTC23zUX/iYVUHUQR1Uy1KmmtQGXAkfbCklSNRnsDs57w6qSmoGLgefZCkmSwW76uSixKq0RuAA4xlZIkgx208dpWFVLPWER4+NshSTJYDc9PFunakqAc4AX2gpJksHOYKd8eDlwqm2QJBnsqqf8lk5SNZ0OrLcNkiSDXXV4tk7TbR1wNt6FQJI0RQ22wGCnTDgeaByGxE9bkqQj5XvIWAuAdtugmfpQ8StYMWwfJEkGu8q8sdoCzaRumPUIB+5jKUmSwW4KvNuEZtw+4GFgwFZIkgx2R8xpWGXG/lK42wUtdkOSZLCbvFW2QFnSB/wwLIcyz25Ikgx2k+M0rDJnIJyxu5SwvqIkSQa7CZgPdNgGZVQLcAmwxFZIkgx2h+e3YZV1TcCFwApbIUky2BnsFL8G4NXAsbZCkmSwG18nXpyuuF6z5wIvsBWSJIPdc3m2TrFJgFcAp9gKSZLBzmCnfHhp6ZckSQY7whRsp8NAETuFcPYusRWSpFoPdp6tUx68gHDdnWfgJclgV9NclFh5cSzhG7MNtkKSDHa1qAOnYZUvKwhr3TXZCkky2NUaz9Ypj5YQ7lLRYiskyWBXS7y+Tnm1kHB/2dm2QpIMdrWgg3B/WCmv5pXCXbutkCSDXd55tk61YE4p3PkhRpIMdgY7KQfagI3AIlshSQa7PGoHFrjrVUOagYuAZbZCkgx2eePZOtWiRuACx78kG
ewMdlJ+Xu/nAcfbCkky2OXBXMJSEFKtSoCzgXW2QpIMdrHzbJ0UrAdOtw2SZLAz2Enx2wXUE75YIUnKiVq6Yfgc4Ch3uWpUEXgK2ARsBrptiSQZ7GLmvWFVa/qBJ0phbgvQZ0skyWCXF07DqhY8SzgjtxnYCgzbEkky2OWN07DKs+2lILcJ2Gk7JMlgl3eerVOeDBGmWDeXfvXYEkkSQNIVLqoeUYyp9onW/RCwL3s1x9Rv657hsd1AuBdeB2Exxrps1723EM6S6zC6YA9je+Vry5qtO5JskdW6Gw6zI4h0AB3Qn51QN6m6Y+23dVdOSwhySQcwK64+u4TKxLX62rJm67bmSsr9VOwu3zgU0RFkdghztJuOaoVfbpFU1WD3LPBMBHXPBealtruB3eP9xR3QNhgWYp1x9TA3SfV8MOTOYgSDZF7qk0txMIK8nEB9fchHoWgYGArjO1PqodgOg/NgsAOGGqCzlO9GPE0c19B1MjqluN9D64T1Ak2lMdozHMHxN4HWOpifegJ9/bA363W3wdwGaEy92e0Oh+Fsmw3zk9LxdxiKPbAjgjGSzIIFI9tDMDAYQd31MKdh7PF3F+FyiaybV8pF4wa7zxTguqw/g65Q48dSD91WgPeO/5rgjVmp+yy4vgVWjWz/BAo9EbwJboBbk9IBcRh6vwP/kPWaF8PSdXAjo0njj/fCJzNS3h5Gv8X6FKmzNl1wG3B16u9eU4C7I3hN3g68bTRHa4IO9GoAvv4vcE3WC347bJwHnx7ZfgJ+8WX4bNbrfge8swNOGtn+Bvzzo+FLSJn2HrijHtpKJwMGPj76OsushTDrrXDnyPY+eOgTGXovPsTYvmYeXJV66IMFuDWC4+/NwA0HC3Z5s8r3DWXkzXs7o3d9eMaWSJKqIe/BzmVONFMGGV2SZAsuSSJJMthNySxgsbtY06iH0bXlniCsNydJksGuApyG1XTYyegU63bbIUky2FXHGnevqmCYcA/WzaVfz9oSSZLBrrragEXuXlVIH+E6uU2EKdZ+WyJJMthNn1XEuzq3sqGbsUuSuISHJMlgN0OchtVkFYFtjE6xetMSSZLBLgPa8NuwmpgBwtTqJsJUa68tkSQZ7LLFaVgdyl5Gz8ptxSVJJEkGu0xzUWKN0QZ0hmvmvkK496okSQa7CLQCS9ytNW9oNjy1HGindIf1EOwMdZIkg11EnIatXb2kliS5HE6wJZIkg13cnIatLbsYvV5uGy5JIkky2OVGK7DUXZprRcKacpsJZ+a6bYkkSfkMdsfgNGwe9TN2SZI+WyJJUv6DndOw+fEsY5ckGbYlkiTVSLDrC8/Dadi4bU+FuR22Q5KkGg12f4IOoM7dGZUh4EnCFOtmoMeWSJJksGN7CHbKvv2MnpV7Ahi0JZIkGewOGAS6YY67MrOeYfRbrNtxSRJJkgx2B7M7JAW/DZsdw4QlSUamWPfYEkmSDHYTsst9mAV9wOOlIPc4LkkiSZLBbrKGCOtiaEbsYXSK9SlckkSSJIPdVJSmYTVNZgHtIU/fRbh2TpIkGewqw2nYqhsEnlgJw+1AY3hsyFAnSZLBrqKG8Kr8KulhdEmSJ4HBBZ4YlSTJYFdNTsNW1E5Gv8W63XZIkmSwm1ZOw07JMOEerJtLv/wOiiRJBruZ0Qf1TsMeSdvYwuiSJP22RJIkg92MexI6nYadkG7GLkli2yRJMthlyzZY0OK+G08xtOfAFKsz1pIkGewyrWk3zFvkvhsxADxBOCu3Bei1JZIkGexisdJ7w7KX0bNyWwmrv0iSJINddFbX6L7aweiSJE87dCVJUuzBrhFYXiP7ZoixS5LsdbhKkqQ8BbuVQH2O90cv4Tq5TYTr5gYcopIkKa/BLo/TsLsYPSu3DZckkSRJNRDscjMNOxvoAF4EX/lhWCxYkiSppoLdCuK9BdoA8Pga2D0/9SSOhj0/dAxKkqQqBbvnd8EVWSz0f2DdLjgaYBYsS/9ZCxx9PLwoS/U2QW8nb
F8MT6+AvfXhJN2ssr92WRfsi2Cc1KXHTFbHyDgfBNIWRVJ3+eUGL+2K41hSq99Wr5h6WPZ22Jj1Otvg1PR2OyzYAKdnve4maE9vr4W1x8LSrNedpK4rr4O6C+FlWa+5EZrLtjv+CjZkve5mWFX20CmRvG8cP2bMdEVwTdcw8NvSfzN+wKOj9KvV9ynVrv2F8HLQYXRBj4cLSZXUQAh2mV7wtzujoS4B5paCXHv4VCIJmmyBvZI0I4oNRHAXhyzd8LQhFeTmMnaOUhIAg7ZgUr2qtw2SKiQpv8buAeBnWapwCOoeh2OHUxmqGZa0hDXtAOiHrfvD+m9V0Qp9nbB3ITw7D/ZPIQlfSOk6wZI7Q/mZd2Xqzae/VHfWdQKXprafBL4dQd1nACektr8HPBZB3esZvc7D+xVPXC+l65GG4LE++GnWC26AFU2p67wG4dH+jL1vjKcFzq6DxamH/hN4JoIx8gZGz+wOAZ+LoOamUt0jngbuiaDuU4F1qe2fA/dHUPdpwNrUa3SM7xTguowVvJqyiy7XwSsXp4LdHnjgPvhaBX/mMPAUYW25TeFHTF0X/Kgs2P1dAXZnfcR0wetSwW5/Ad4SQc1ry4LdA5HUfVtZsLujAHdHUPftlF3Aq0l/iP3lx+Dvs17n22FjOtgNwM9iqPsGuLss2L2/AL+L4LX1F6lgNxDJcayjLNg9FkndHy4LdncV4NYI6r75UMEui6br23b9hLN+mwlry/X5ViNJkmLSEEF9K6r47+9h9KzcU2T/i7eSJEnRBrvlVP7Lpn8uBbnNxHF9hSRJUi6CXSWmYQcJF85vIky19rjbJUmSwW561ZP6gsQk9RDOyG0uhTqXX5AkSQa7GTTZadidjE6xbnfXSpIkg112HHIaNgHmEBYKPh7uvy+CJSEkSZJqMdgdbBq2D9jyAliynDHLtfe7KyVJksEum5YxuiBjN2OXJCkeAye56yRJkuIIdnMIt9bZTLZuFStJkmSwm6Tfu2skSZImp84WSJIkGewkSZJksJMkSZLBTpIkSQY7SZIkg50kSZIMdpIkSTLYSZIkyWAnSZJksJMkSZLBTpIkSQY7SZIkGewkSZJksJMkSTLYSZIkyWAnSZIkg50kSZIMdpIkSQY7SZIkGewkSZJksJMkSZLBTpIkyWAnSZIkg50kSZIMdpIkSTLYSZIkyWAnSZJksJMkSZLBTpIkSdWVdEExtf0b4AcR1H0KcGZq+z7gxxHU/ZfAktT27UBfBHW/A2go/b6vVHfWLQBel9reAnw1grrPBl6Y2v4m8EhkdXcXoMPD6+F1wW6gHWAYHhmA72e95npY3QDnjGwPwUODcG/W626C1yTwvNRDXwB2RDBMrgaaS78fBD4VQc3NpbpHbAPujqDu9cCpqe17gV/HVnd5sJOkqTLYHUGwk6RKqAMetQ2SKuhJWzBhf7QFkipoax1wA7A10ifg2UblVaxj+0Hg3e6+CXsfsNkxKjm2K+BR4Fp3nyRJkiRJkiRJkiRJkiRJkiRJkiRJkiRJkiRJkiRJkiRJkiRJkiRJkiRJkiRN3f8DIrOQLp3kqkAAAAAASUVORK5CYII=" />
///
/// The attributes of each vertex are being interpolated, and the GPU assigns a value for each
/// attribute for each pixel.
///
/// ## Step 9: Fragment shader
///
/// The GPU now executes the fragment shader once for each pixel of each primitive.
///
/// The vertex attributes that were interpolated at the previous step are passed to the fragment
/// shader.
///
/// The fragment shader must return the color to write by setting the value of `gl_FragColor`.
///
/// ## Step 10: Pixel ownership
///
/// This step is mostly an implementation detail. If the window you are drawing on is not on the
/// foreground, or if it is partially obstructed, then the pixels that are not on the
/// foreground will be discarded.
///
/// This is only relevant if you draw to the default framebuffer.
///
/// ## Step 11: Scissor test
///
/// If `scissor` has been specified, then all the pixels that are outside of this rect
/// are discarded.
///
/// ## Step 12: Multisampling
///
/// ## Step 13: Stencil test
///
/// Stencil tests are currently not supported by glium.
///
/// ## Step 14: Depth test
///
/// In addition to the colors, surfaces can also have a depth buffer attached to it. In this
/// situation, just like each pixel has a color, each pixel of the surface also has an associated
/// depth value.
///
/// If a depth buffer is present, the GPU will compare the depth value of the pixel currently
/// being processed, with the existing depth value. Depending on the value of `depth_function`
/// in the draw parameters, the depth test will either pass, in which case the pipeline
/// continues, or fail, in which case the pixel is discarded.
///
/// The purpose of this test is to avoid drawing elements that are in the background of the
/// scene over elements that are in the foreground.
///
/// See the documentation of `DepthFunction` for more information.
///
/// ## Step 15: Blending
///
/// For each pixel to write, the GPU takes the RGBA color that the fragment shader has returned
/// and the existing RGBA color already written on the surface, and merges the two.
///
/// The way they are merged depends on the value of `blending_function`. This allows you to choose
/// how alpha colors are merged together.
///
/// See the documentation of `BlendingFunction` for more information.
///
/// ## Step 16: Dithering (optional)
///
/// ## Step 17: End
///
/// This is finally the step where colors are being written.
///
/// ## Missing steps
///
/// Some steps are missing because they are not supported by glium for the moment: dithering,
/// occlusion query updating, logic operations, sRGB conversion, write masks.
///
/// Instancing and multiple viewports are also missing, as they are not supported.
///
pub trait Surface: Sized {
    /// Clears some attachments of the target.
    ///
    /// Each attachment is cleared only when the corresponding parameter is `Some`.
    fn clear(&mut self, color: Option<(f32, f32, f32, f32)>, depth: Option<f32>,
             stencil: Option<i32>);

    /// Clears the color attachment of the target.
    fn clear_color(&mut self, red: f32, green: f32, blue: f32, alpha: f32) {
        self.clear(Some((red, green, blue, alpha)), None, None);
    }

    /// Clears the depth attachment of the target.
    fn clear_depth(&mut self, value: f32) {
        self.clear(None, Some(value), None);
    }

    /// Clears the stencil attachment of the target.
    fn clear_stencil(&mut self, value: i32) {
        self.clear(None, None, Some(value));
    }

    /// Clears the color and depth attachments of the target.
    fn clear_color_and_depth(&mut self, color: (f32, f32, f32, f32), depth: f32) {
        self.clear(Some(color), Some(depth), None);
    }

    /// Clears the color and stencil attachments of the target.
    fn clear_color_and_stencil(&mut self, color: (f32, f32, f32, f32), stencil: i32) {
        self.clear(Some(color), None, Some(stencil));
    }

    /// Clears the depth and stencil attachments of the target.
    fn clear_depth_and_stencil(&mut self, depth: f32, stencil: i32) {
        self.clear(None, Some(depth), Some(stencil));
    }

    /// Clears the color, depth and stencil attachments of the target.
    fn clear_all(&mut self, color: (f32, f32, f32, f32), depth: f32, stencil: i32) {
        self.clear(Some(color), Some(depth), Some(stencil));
    }

    /// Returns the dimensions in pixels of the target.
    fn get_dimensions(&self) -> (u32, u32);

    /// Returns the number of bits of each pixel of the depth buffer.
    ///
    /// Returns `None` if there is no depth buffer.
    fn get_depth_buffer_bits(&self) -> Option<u16>;

    /// Returns true if the surface has a depth buffer available.
    fn has_depth_buffer(&self) -> bool {
        self.get_depth_buffer_bits().is_some()
    }

    /// Returns the number of bits of each pixel of the stencil buffer.
    ///
    /// Returns `None` if there is no stencil buffer.
    fn get_stencil_buffer_bits(&self) -> Option<u16>;

    /// Returns true if the surface has a stencil buffer available.
    fn has_stencil_buffer(&self) -> bool {
        self.get_stencil_buffer_bits().is_some()
    }

    /// Draws.
    ///
    /// See above for what happens exactly when you draw.
    ///
    /// # Panics
    ///
    /// - Panics if the requested depth function requires a depth buffer and none is attached.
    /// - Panics if the type of some of the vertex source's attributes do not match the program's.
    /// - Panics if a program's attribute is not in the vertex source (does *not* panic if a
    ///   vertex's attribute is not used by the program).
    /// - Panics if the viewport is larger than the dimensions supported by the hardware.
    /// - Panics if the depth range is outside of `(0, 1)`.
    /// - Panics if a value in the uniforms doesn't match the type requested by the program.
    ///
    fn draw<'a, 'b, V, I, U>(&mut self, vertices: V, indices: &I, program: &Program,
        uniforms: U, draw_parameters: &DrawParameters) -> Result<(), DrawError> where
        V: vertex::MultiVerticesSource<'b>, I: index::ToIndicesSource,
        U: uniforms::Uniforms;

    /// Returns an opaque type that is used by the implementation of blit functions.
    fn get_blit_helper(&self) -> BlitHelper;

    /// Copies a rectangle of pixels from this surface to another surface.
    ///
    /// The `source_rect` defines the area of the source (`self`) that will be copied, and the
    /// `target_rect` defines the area where the copied image will be pasted. If the source and
    /// target areas don't have the same dimensions, the image will be resized to match.
    /// The `filter` parameter is relevant only in this situation.
    ///
    /// It is possible for the source and the target to be the same surface. However if the
    /// rectangles overlap, then the behavior is undefined.
    ///
    /// Note that there is no alpha blending, depth/stencil checking, etc. This function just
    /// copies pixels.
    #[experimental = "The name will likely change"]
    fn blit_color<S>(&self, source_rect: &Rect, target: &S, target_rect: &Rect,
                     filter: uniforms::MagnifySamplerFilter) where S: Surface
    {
        ops::blit(self, target, gl::COLOR_BUFFER_BIT, source_rect, target_rect,
                  filter.to_glenum())
    }

    /// Copies the entire surface to a target surface. See `blit_color`.
    #[experimental = "The name will likely change"]
    fn blit_whole_color_to<S>(&self, target: &S, target_rect: &Rect,
                              filter: uniforms::MagnifySamplerFilter) where S: Surface
    {
        // `get_dimensions` already returns `(u32, u32)`, so no cast is needed.
        let (src_width, src_height) = self.get_dimensions();
        let src_rect = Rect { left: 0, bottom: 0, width: src_width, height: src_height };
        self.blit_color(&src_rect, target, target_rect, filter)
    }

    /// Copies the entire surface to the entire target. See `blit_color`.
    #[experimental = "The name will likely change"]
    fn fill<S>(&self, target: &S, filter: uniforms::MagnifySamplerFilter) where S: Surface {
        // Blit the whole source onto the whole target; both sizes come straight from
        // `get_dimensions`, which already yields `u32` values.
        let (src_width, src_height) = self.get_dimensions();
        let src_rect = Rect { left: 0, bottom: 0, width: src_width, height: src_height };

        let (target_width, target_height) = target.get_dimensions();
        let target_rect = Rect { left: 0, bottom: 0,
                                 width: target_width, height: target_height };

        self.blit_color(&src_rect, target, &target_rect, filter)
    }
}
/// Error that can happen while drawing.
#[derive(Clone, Debug)]
pub enum DrawError {
    /// A depth function has been requested but no depth buffer is available.
    NoDepthBuffer,

    /// The type of a vertex attribute in the vertices source doesn't match what the
    /// program requires.
    AttributeTypeMismatch,

    /// One of the attributes required by the program is missing from the vertex format.
    ///
    /// Note that it is perfectly valid to have an attribute in the vertex format that is
    /// not used by the program.
    AttributeMissing,

    /// The viewport's dimensions are not supported by the backend.
    ViewportTooLarge,

    /// The depth range is outside of the `(0, 1)` range.
    InvalidDepthRange,

    /// The type of a uniform doesn't match what the program requires.
    UniformTypeMismatch {
        /// Name of the uniform you are trying to bind.
        name: String,
        /// The expected type.
        expected: uniforms::UniformType,
    },

    /// Tried to bind a uniform buffer to a single uniform value.
    UniformBufferToValue {
        /// Name of the uniform you are trying to bind.
        name: String,
    },

    /// Tried to bind a single uniform value to a uniform block.
    UniformValueToBlock {
        /// Name of the uniform you are trying to bind.
        name: String,
    },

    /// The layout of the content of the uniform buffer does not match the layout of the block.
    UniformBlockLayoutMismatch {
        /// Name of the block you are trying to bind.
        name: String,
    },

    /// The number of vertices per patch that has been requested is not supported.
    UnsupportedVerticesPerPatch,

    /// Trying to use tessellation, but this is not supported by the underlying hardware.
    TessellationNotSupported,

    /// Using a program which contains tessellation shaders, but without submitting patches.
    TessellationWithoutPatches,

    /// Trying to use a sampler, but they are not supported by the backend.
    SamplersNotSupported,

    /// When you use instancing, all vertices sources must have the same size.
    InstancesCountMismatch,

    /// If you don't use indices, then all vertices sources must have the same size.
    VerticesSourcesLengthMismatch,
}
impl std::fmt::Display for DrawError {
    /// Produces a human-readable description of the error.
    ///
    /// For the uniform-related variants the name of the offending uniform or block is
    /// included, so that the error can be traced back to the shader interface.
    /// (Previously the `name` fields were bound but never used, which both triggered
    /// unused-variable warnings and made the messages less actionable.)
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
        match self {
            &DrawError::NoDepthBuffer =>
                write!(fmt, "A depth function has been requested but no depth buffer is \
                             available."),
            &DrawError::AttributeTypeMismatch =>
                write!(fmt, "The type of a vertex attribute in the vertices source doesn't \
                             match what the program requires."),
            &DrawError::AttributeMissing =>
                write!(fmt, "One of the attributes required by the program is missing from \
                             the vertex format."),
            &DrawError::ViewportTooLarge =>
                write!(fmt, "The viewport's dimensions are not supported by the backend."),
            &DrawError::InvalidDepthRange =>
                write!(fmt, "The depth range is outside of the `(0, 1)` range."),
            &DrawError::UniformTypeMismatch { ref name, .. } => {
                write!(fmt, "The type of the uniform `{}` doesn't match what the program \
                             requires.", name)
            },
            &DrawError::UniformBufferToValue { ref name } => {
                write!(fmt, "Tried to bind a uniform buffer to the single uniform \
                             value `{}`.", name)
            },
            &DrawError::UniformValueToBlock { ref name } => {
                write!(fmt, "Tried to bind a single uniform value to the uniform \
                             block `{}`.", name)
            },
            &DrawError::UniformBlockLayoutMismatch { ref name } => {
                write!(fmt, "The layout of the content of the uniform buffer does not match \
                             the layout of the block `{}`.", name)
            },
            &DrawError::UnsupportedVerticesPerPatch =>
                write!(fmt, "The number of vertices per patch that has been requested is \
                             not supported."),
            &DrawError::TessellationNotSupported =>
                write!(fmt, "Trying to use tessellation, but this is not supported by the \
                             underlying hardware."),
            &DrawError::TessellationWithoutPatches =>
                write!(fmt, "Using a program which contains tessellation shaders, but \
                             without submitting patches."),
            &DrawError::SamplersNotSupported =>
                write!(fmt, "Trying to use a sampler, but they are not supported by the \
                             backend."),
            &DrawError::InstancesCountMismatch =>
                write!(fmt, "When you use instancing, all vertices sources must have the \
                             same size."),
            &DrawError::VerticesSourcesLengthMismatch =>
                write!(fmt, "If you don't use indices, then all vertices sources must have \
                             the same size."),
        }
    }
}
// Opaque handle passed around by the blit functions: the shared display implementation
// plus the framebuffer attachments of the surface being blitted (`None` for the default
// framebuffer — see `Frame::get_blit_helper`).
#[doc(hidden)]
pub struct BlitHelper<'a>(&'a Arc<DisplayImpl>, Option<&'a fbo::FramebufferAttachments>);
/// Implementation of `Surface`, targeting the default framebuffer.
///
/// The back- and front-buffers are swapped when the `Frame` is destroyed. This operation is
/// instantaneous, even when vsync is enabled.
pub struct Frame {
    // handle to the display that created this frame; used for drawing and for
    // swapping the buffers when the frame is dropped
    display: Display,
    // framebuffer dimensions cached at creation time (see `Display::draw`)
    dimensions: (u32, u32),
}
impl Frame {
    /// Stop drawing, and swap the buffers.
    ///
    /// The body is intentionally empty: consuming `self` runs the `Drop`
    /// implementation of `Frame`, which performs the actual buffer swap.
    pub fn finish(self) {
    }
}
impl Surface for Frame {
    fn clear(&mut self, color: Option<(f32, f32, f32, f32)>, depth: Option<f32>,
             stencil: Option<i32>)
    {
        // `None` as the framebuffer parameter targets the default framebuffer.
        ops::clear(&self.display.context, None, color, depth, stencil);
    }

    fn get_dimensions(&self) -> (u32, u32) {
        // Cached when the frame was created by `Display::draw`.
        self.dimensions
    }

    fn get_depth_buffer_bits(&self) -> Option<u16> {
        self.display.context.context.capabilities().depth_bits
    }

    fn get_stencil_buffer_bits(&self) -> Option<u16> {
        self.display.context.context.capabilities().stencil_bits
    }

    fn draw<'a, 'b, V, I, U>(&mut self, vertex_buffer: V,
        index_buffer: &I, program: &Program, uniforms: U,
        draw_parameters: &DrawParameters) -> Result<(), DrawError>
        where I: index::ToIndicesSource, U: uniforms::Uniforms,
              V: vertex::MultiVerticesSource<'b>
    {
        use index::ToIndicesSource;

        // A depth test cannot work without a depth buffer.
        if draw_parameters.depth_function.requires_depth_buffer() && !self.has_depth_buffer() {
            return Err(DrawError::NoDepthBuffer);
        }

        // Reject viewports exceeding the dimensions supported by the hardware.
        // (The two checks were previously duplicated branches returning the same error.)
        if let Some(viewport) = draw_parameters.viewport {
            let max_dims = self.display.context.context.capabilities().max_viewport_dims;
            if viewport.width > max_dims.0 as u32 || viewport.height > max_dims.1 as u32 {
                return Err(DrawError::ViewportTooLarge);
            }
        }

        // `self.dimensions` is already `(u32, u32)`, so it is passed through directly
        // (the previous `as u32` casts were no-ops).
        ops::draw(&self.display, None, vertex_buffer.build_vertices_source().as_mut_slice(),
                  index_buffer.to_indices_source(), program, uniforms, draw_parameters,
                  self.dimensions)
    }

    fn get_blit_helper(&self) -> BlitHelper {
        // `None`: the default framebuffer has no FBO attachments.
        BlitHelper(&self.display.context, None)
    }
}
#[unsafe_destructor]
impl Drop for Frame {
    fn drop(&mut self) {
        // Swapping the buffers on drop means `Frame::finish` (which simply consumes
        // the frame) triggers the swap, and the swap also happens if the user lets
        // the frame go out of scope without calling `finish`.
        self.display.context.context.swap_buffers();
    }
}
/// Objects that can build a `Display` object.
pub trait DisplayBuild {
    /// Build a context and a `Display` to draw on it.
    ///
    /// Performs a compatibility check to make sure that all core elements of glium
    /// are supported by the implementation.
    ///
    /// Returns a `GliumCreationError` if the window or context cannot be created,
    /// or if the OpenGL implementation is too old.
    fn build_glium(self) -> Result<Display, GliumCreationError>;
}
/// Error that can happen while creating a glium display.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GliumCreationError {
    /// An error has happened while creating the glutin window or headless renderer.
    GlutinCreationError(glutin::CreationError),

    /// The OpenGL implementation is too old.
    IncompatibleOpenGl(String),
}
impl std::fmt::Display for GliumCreationError {
fn fmt(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
let self_error = self as &std::error::Error;
formatter.write_str(self_error.description())
}
}
impl std::error::Error for GliumCreationError {
fn description(&self) -> &str {
match self {
&GliumCreationError::GlutinCreationError(_) => "Error while creating glutin window or headless renderer",
&GliumCreationError::IncompatibleOpenGl(_) => "The OpenGL implementation is too old to work with glium",
}
}
fn cause(&self) -> Option<&std::error::Error> {
match self {
&GliumCreationError::GlutinCreationError(ref err) => Some(err as &std::error::Error),
&GliumCreationError::IncompatibleOpenGl(_) => None,
}
}
}
impl std::error::FromError<glutin::CreationError> for GliumCreationError {
    // Enables `try!` in `build_glium` to convert glutin creation errors
    // automatically into `GliumCreationError`s.
    fn from_error(err: glutin::CreationError) -> GliumCreationError {
        GliumCreationError::GlutinCreationError(err)
    }
}
impl<'a> DisplayBuild for glutin::WindowBuilder<'a> {
    fn build_glium(self) -> Result<Display, GliumCreationError> {
        // `try!` converts a `glutin::CreationError` through the `FromError` impl.
        let context = try!(context::new_from_window(self, None));

        let display = Display {
            context: Arc::new(DisplayImpl {
                context: context,
                debug_callback: Mutex::new(None),
                // `Some` at creation; the option only becomes `None` when the
                // container is destroyed.
                framebuffer_objects: Some(fbo::FramebuffersContainer::new()),
                vertex_array_objects: Mutex::new(HashMap::new()),
                samplers: Mutex::new(HashMap::new()),
            }),
        };

        display.init_debug_callback();

        Ok(display)
    }
}
#[cfg(feature = "headless")]
impl DisplayBuild for glutin::HeadlessRendererBuilder {
fn build_glium(self) -> Result<Display, GliumCreationError> {
let context = try!(context::new_from_headless(self));
let display = Display {
context: Arc::new(DisplayImpl {
context: context,
debug_callback: Mutex::new(None),
framebuffer_objects: Some(fbo::FramebuffersContainer::new()),
vertex_array_objects: Mutex::new(HashMap::new()),
samplers: Mutex::new(HashMap::new()),
}),
};
display.init_debug_callback();
Ok(display)
}
}
/// The main object of this library. Controls the whole display.
///
/// This object contains a smart pointer to the real implementation.
/// Cloning the display allows you to easily share the `Display` object throughout
/// your program and between threads.
#[derive(Clone)]
pub struct Display {
    // reference-counted pointer to the shared state; cloning a `Display` only
    // increments the reference count
    context: Arc<DisplayImpl>,
}
// Shared state behind the `Display` handle; owned through an `Arc` and therefore
// shared between all clones of a `Display`.
struct DisplayImpl {
    // contains everything related to the current context and its state
    context: context::Context,

    // the callback used for debug messages
    debug_callback: Mutex<Option<Box<FnMut(String, debug::Source, debug::MessageType, debug::Severity)
                                     + Send + Sync>>>,

    // we maintain a list of FBOs
    // the option is here to destroy the container
    framebuffer_objects: Option<fbo::FramebuffersContainer>,

    // we maintain a list of VAOs for each vertexbuffer-indexbuffer-program association
    // the key is a (buffers-list, program) ; the buffers list must be sorted
    vertex_array_objects: Mutex<HashMap<(Vec<gl::types::GLuint>, Handle),
                                        vertex_array_object::VertexArrayObject>>,

    // we maintain a list of samplers for each possible behavior
    samplers: Mutex<HashMap<uniforms::SamplerBehavior, sampler_object::SamplerObject>>,
}
impl Display {
/// Reads all events received by the window.
///
/// This iterator polls for events and can be exhausted.
pub fn poll_events(&self) -> PollEventsIter {
    // Thin delegation to the underlying glutin context.
    self.context.context.poll_events()
}
/// Reads all events received by the window, waiting for events to arrive when none
/// are pending (contrast with `poll_events`, which can be exhausted).
pub fn wait_events(&self) -> WaitEventsIter {
    // Thin delegation to the underlying glutin context.
    self.context.context.wait_events()
}
/// Returns the dimensions of the main framebuffer.
pub fn get_framebuffer_dimensions(&self) -> (u32, u32) {
    // Queries the underlying context; `Display::draw` caches this value in the
    // `Frame` it returns.
    self.context.context.get_framebuffer_dimensions()
}
/// Start drawing on the backbuffer.
///
/// This function returns a `Frame`, which can be used to draw on it. When the `Frame` is
/// destroyed, the buffers are swapped.
///
/// Note that destroying a `Frame` is immediate, even if vsync is enabled.
pub fn draw(&self) -> Frame {
    Frame {
        // the frame keeps its own clone of the display handle so it can swap
        // buffers when it is dropped
        display: self.clone(),
        // cache the dimensions now; `Frame::get_dimensions` returns this value
        dimensions: self.get_framebuffer_dimensions(),
    }
}
/// Returns the maximum value that can be used for anisotropic filtering, or `None`
/// if the hardware doesn't support it.
pub fn get_max_anisotropy_support(&self) -> Option<u16> {
self.context.context.capabilities().max_texture_max_anisotropy.map(|v| v as u16)
}
/// Returns the maximum dimensions of the viewport.
///
/// Glium will panic if you request a larger viewport than this when drawing.
pub fn get_max_viewport_dimensions(&self) -> (u32, u32) {
let d = self.context.context.capabilities().max_viewport_dims;
(d.0 as u32, d.1 as u32)
}
/// Releases the shader compiler, indicating that no new programs will be created for a while.
///
/// # Features
///
/// This method is always available, but is a no-op if it's not available in
/// the implementation.
pub fn release_shader_compiler(&self) {
self.context.context.exec(move |: ctxt| {
unsafe {
if ctxt.opengl_es || ctxt.version >= &context::GlVersion(4, 1) {
ctxt.gl.ReleaseShaderCompiler();
}
}
});
}
/// Returns an estimate of the amount of video memory available in bytes.
///
/// Returns `None` if no estimate is available.
pub fn get_free_video_memory(&self) -> Option<usize> {
let (tx, rx) = channel();
self.context.context.exec(move |: ctxt| {
unsafe {
use std::mem;
let mut value: [gl::types::GLint; 4] = mem::uninitialized();
let value = if ctxt.extensions.gl_nvx_gpu_memory_info {
ctxt.gl.GetIntegerv(gl::GPU_MEMORY_INFO_CURRENT_AVAILABLE_VIDMEM_NVX,
&mut value[0]);
Some(value[0])
} else if ctxt.extensions.gl_ati_meminfo {
ctxt.gl.GetIntegerv(gl::TEXTURE_FREE_MEMORY_ATI, &mut value[0]);
Some(value[0])
} else {
None
};
tx.send(value).ok();
}
});
rx.recv().unwrap().map(|v| v as usize * 1024)
}
// TODO: do this more properly
fn init_debug_callback(&self) {
if cfg!(ndebug) {
return;
}
// this is the C callback
extern "system" fn callback_wrapper(source: gl::types::GLenum, ty: gl::types::GLenum,
id: gl::types::GLuint, severity: gl::types::GLenum, _length: gl::types::GLsizei,
message: *const gl::types::GLchar, user_param: *mut libc::c_void)
{
let message = unsafe {
String::from_utf8(std::ffi::c_str_to_bytes(&message).to_vec()).unwrap()
};
if severity == gl::DEBUG_SEVERITY_HIGH ||
severity == gl::DEBUG_SEVERITY_MEDIUM
{
panic!("Debug message with high or medium severity: `{}`.\n\
Please report this error: https://github.com/tomaka/glium/issues", message);
}
}
// enabling the callback
self.context.context.exec(move |: ctxt| {
unsafe {
if ctxt.version >= &context::GlVersion(4,5) || ctxt.extensions.gl_khr_debug ||
ctxt.extensions.gl_arb_debug_output
{
if ctxt.state.enabled_debug_output_synchronous != true {
ctxt.gl.Enable(gl::DEBUG_OUTPUT_SYNCHRONOUS);
ctxt.state.enabled_debug_output_synchronous = true;
}
if ctxt.version >= &context::GlVersion(4,5) || ctxt.extensions.gl_khr_debug {
// TODO: with GLES, the GL_KHR_debug function has a `KHR` suffix
// but with GL only, it doesn't have one
ctxt.gl.DebugMessageCallback(callback_wrapper, std::ptr::null());
ctxt.gl.DebugMessageControl(gl::DONT_CARE, gl::DONT_CARE, gl::DONT_CARE, 0,
std::ptr::null(), gl::TRUE);
if ctxt.state.enabled_debug_output != Some(true) {
ctxt.gl.Enable(gl::DEBUG_OUTPUT);
ctxt.state.enabled_debug_output = Some(true);
}
} else {
ctxt.gl.DebugMessageCallbackARB(callback_wrapper, std::ptr::null());
ctxt.gl.DebugMessageControlARB(gl::DONT_CARE, gl::DONT_CARE, gl::DONT_CARE,
0, std::ptr::null(), gl::TRUE);
ctxt.state.enabled_debug_output = Some(true);
}
}
}
});
}
/// Reads the content of the front buffer.
///
/// You will only see the data that has finished being drawn.
///
/// This function can return any type that implements `Texture2dData`.
///
/// ## Example
///
/// ```no_run
/// # extern crate glium;
/// # extern crate glutin;
/// # fn main() {
/// # let display: glium::Display = unsafe { ::std::mem::uninitialized() };
/// let pixels: Vec<Vec<(u8, u8, u8)>> = display.read_front_buffer();
/// # }
/// ```
pub fn read_front_buffer<P, T>(&self) -> T // TODO: remove Clone for P
where P: texture::PixelValue + Clone + Send,
T: texture::Texture2dData<Data = P>
{
ops::read_from_default_fb(gl::FRONT_LEFT, self)
}
/// Asserts that there are no OpenGL errors pending.
///
/// This function should be used in tests.
pub fn assert_no_error(&self) {
let (tx, rx) = channel();
self.context.context.exec(move |: mut ctxt| {
tx.send(get_gl_error(&mut ctxt)).ok();
});
match rx.recv().unwrap() {
Some(msg) => panic!("{}", msg),
None => ()
};
}
/// Waits until all the previous commands have finished being executed.
///
/// When you execute OpenGL functions, they are not executed immediately. Instead they are
/// put in a queue. This function waits until all commands have finished being executed, and
/// the queue is empty.
///
/// **You don't need to call this function manually, except when running benchmarks.**
pub fn synchronize(&self) {
let (tx, rx) = channel();
self.context.context.exec(move |: ctxt| {
unsafe { ctxt.gl.Finish(); }
tx.send(()).ok();
});
rx.recv().unwrap();
}
}
// this destructor is here because objects stored in `DisplayImpl` need the GL
// context to be cleaned up; it runs when the last `Arc<DisplayImpl>` is dropped
impl Drop for DisplayImpl {
    fn drop(&mut self) {
        // disabling callback
        self.context.exec(move |: ctxt| {
            unsafe {
                if ctxt.state.enabled_debug_output != Some(false) {
                    if ctxt.version >= &context::GlVersion(4,5) || ctxt.extensions.gl_khr_debug {
                        ctxt.gl.Disable(gl::DEBUG_OUTPUT);
                    } else if ctxt.extensions.gl_arb_debug_output {
                        // NOTE(review): `transmute(0us)` forges a null function pointer
                        // to unregister the ARB callback; null fn pointers are UB under
                        // modern Rust rules — confirm against the targeted rustc version
                        ctxt.gl.DebugMessageCallbackARB(std::mem::transmute(0us),
                                                        std::ptr::null());
                    }

                    ctxt.state.enabled_debug_output = Some(false);
                    // make sure the callback is truly unregistered before we tear down
                    ctxt.gl.Finish();
                }
            }
        });

        // FBOs are cleaned up first, while the context is still fully alive;
        // the `Option` exists precisely so the container can be moved out here
        {
            let fbos = self.framebuffer_objects.take();
            fbos.unwrap().cleanup(&self.context);
        }

        // dropping the VAOs releases their GL objects via their own destructors
        {
            let mut vaos = self.vertex_array_objects.lock().unwrap();
            vaos.clear();
        }

        // same for the sampler objects
        {
            let mut samplers = self.samplers.lock().unwrap();
            samplers.clear();
        }
    }
}
#[allow(dead_code)]
// Polls the OpenGL error queue once and translates the code into its symbolic name.
// Returns `None` when no error is pending.
fn get_gl_error(ctxt: &mut context::CommandContext) -> Option<&'static str> {
    let code = unsafe { ctxt.gl.GetError() };

    if code == gl::NO_ERROR {
        return None;
    }

    Some(match code {
        gl::INVALID_ENUM => "GL_INVALID_ENUM",
        gl::INVALID_VALUE => "GL_INVALID_VALUE",
        gl::INVALID_OPERATION => "GL_INVALID_OPERATION",
        gl::INVALID_FRAMEBUFFER_OPERATION => "GL_INVALID_FRAMEBUFFER_OPERATION",
        gl::OUT_OF_MEMORY => "GL_OUT_OF_MEMORY",
        gl::STACK_UNDERFLOW => "GL_STACK_UNDERFLOW",
        gl::STACK_OVERFLOW => "GL_STACK_OVERFLOW",
        _ => "Unknown glGetError return value",
    })
}
// Fix minor error with blending function docs
/*!
Easy-to-use, high-level, OpenGL3+ wrapper.
# Initialization
This library defines the `DisplayBuild` trait which is currently implemented only on
`glutin::WindowBuilder`.
Initialization is done by creating a `WindowBuilder` and calling `build_glium`.
```no_run
extern crate glutin;
extern crate glium;
fn main() {
use glium::DisplayBuild;
let display = glutin::WindowBuilder::new()
.with_dimensions(1024, 768)
.with_title(format!("Hello world"))
.build_glium().unwrap();
}
```
The `display` object is the most important object of this library.
The window you are drawing on will produce events. They can be received by calling
`display.poll_events()`.
# Complete example
The first step is to create the vertex buffer, which contains the list of all the points that
make up our mesh. The elements that we pass to `VertexBuffer::new` must implement the
`glium::vertex::VertexFormat` trait, which can be easily added for any custom struct thanks to the
`#[vertex_format]` attribute.
See the `vertex` module documentation for more information.
```no_run
# #![feature(plugin)]
#[plugin]
extern crate glium_macros;
# extern crate glium;
# fn main() {
#[vertex_format]
#[derive(Copy)]
struct Vertex {
position: [f32; 2],
color: [f32; 3],
}
# let display: glium::Display = unsafe { std::mem::uninitialized() };
let vertex = glium::VertexBuffer::new(&display, vec![
Vertex { position: [-0.5, -0.5], color: [0.0, 1.0, 0.0] },
Vertex { position: [ 0.0, 0.5], color: [0.0, 0.0, 1.0] },
Vertex { position: [ 0.5, -0.5], color: [1.0, 0.0, 0.0] },
]);
# }
```
We will also need to tell glium how the vertices must be linked together. We could create an index
buffer, but since we only have a single triangle the simpler solution here is not to use indices.
```no_run
use glium::index;
let indices = index::NoIndices(index::PrimitiveType::TrianglesList);
```
Next, we create the program, which is composed of a *vertex shader*, a program executed once for
each element in our vertex buffer, and a *fragment shader*, a program executed once for each
pixel before it is written on the final image.
The purpose of a program is to instruct the GPU how to process our mesh, in order to obtain pixels.
```no_run
# let display: glium::Display = unsafe { std::mem::uninitialized() };
let program = glium::Program::from_source(&display,
// vertex shader
" #version 110
uniform mat4 matrix;
attribute vec2 position;
attribute vec3 color;
varying vec3 v_color;
void main() {
gl_Position = vec4(position, 0.0, 1.0) * matrix;
v_color = color;
}
",
// fragment shader
" #version 110
varying vec3 v_color;
void main() {
gl_FragColor = vec4(v_color, 1.0);
}
",
// optional geometry shader
None
).unwrap();
```
*Note: teaching you the GLSL language is not covered by this guide.*
You may notice that the `attribute` declarations in the vertex shader match the field names and
types of the elements in the vertex buffer. This is required, otherwise drawing will result in
an error.
In the example above, one of our shaders contains `uniform mat4 matrix;`. Uniforms are global
variables in our program whose values are chosen by the application.
```no_run
# #[macro_use]
# extern crate glium;
# fn main() {
let uniforms = uniform! {
matrix: [
[ 1.0, 0.0, 0.0, 0.0 ],
[ 0.0, 1.0, 0.0, 0.0 ],
[ 0.0, 0.0, 1.0, 0.0 ],
[ 0.0, 0.0, 0.0, 1.0 ]
]
};
# }
```
The value of uniforms can be of any type that implements `glium::uniforms::UniformValue`.
This includes textures and samplers (not covered here). See the `uniforms` module documentation
for more information.
Now that everything is initialized, we can finally draw something. The `display.draw()` function
will start drawing a new frame and return a `Frame` object. This `Frame` object has a `draw`
function, which you can use to draw things.
Its arguments are the source of vertices, source of indices, program, uniforms, and an object of
type `DrawParameters` which contains miscellaneous information specifying how everything should
be rendered (depth test, blending, backface culling, etc.).
```no_run
use glium::Surface;
# let display: glium::Display = unsafe { std::mem::uninitialized() };
# let vertex_buffer: glium::VertexBuffer<u8> = unsafe { std::mem::uninitialized() };
# let indices: glium::IndexBuffer = unsafe { std::mem::uninitialized() };
# let program: glium::Program = unsafe { std::mem::uninitialized() };
# let uniforms = glium::uniforms::EmptyUniforms;
let mut target = display.draw();
target.clear_color(0.0, 0.0, 0.0, 0.0); // filling the output with the black color
target.draw(&vertex_buffer, &indices, &program, &uniforms,
&std::default::Default::default()).unwrap();
target.finish();
```
*/
#![feature(slicing_syntax)]
#![feature(unboxed_closures)]
#![feature(unsafe_destructor)]
#![unstable]
#![allow(unstable)]
#![warn(missing_docs)]
// TODO: remove these when everything is implemented
#![allow(dead_code)]
#![allow(unused_variables)]
#[cfg(feature = "cgmath")]
extern crate cgmath;
extern crate glutin;
#[cfg(feature = "image")]
extern crate image;
extern crate libc;
#[cfg(feature = "nalgebra")]
extern crate nalgebra;
pub use context::{PollEventsIter, WaitEventsIter};
pub use index::IndexBuffer;
pub use vertex::{VertexBuffer, Vertex, VertexFormat};
pub use program::{Program, ProgramCreationError};
pub use program::ProgramCreationError::{CompilationError, LinkingError, ShaderTypeNotSupported};
pub use sync::{LinearSyncFence, SyncFence};
pub use texture::{Texture, Texture2d};
use std::collections::HashMap;
use std::ops::{Deref, DerefMut};
use std::sync::{Arc, Mutex};
use std::sync::mpsc::channel;
pub mod debug;
pub mod framebuffer;
pub mod index;
pub mod pixel_buffer;
pub mod macros;
pub mod program;
pub mod render_buffer;
pub mod uniforms;
pub mod vertex;
pub mod texture;
#[deprecated = "`index_buffer` has been renamed to `index`"]
#[allow(missing_docs)]
pub mod index_buffer {
    // backward-compatibility shim: re-exports everything from the new `index` module
    pub use index::*;
}
mod buffer;
mod context;
mod fbo;
mod ops;
mod sampler_object;
mod sync;
mod vertex_array_object;
mod gl {
    // OpenGL bindings generated by the build script into $OUT_DIR
    include!(concat!(env!("OUT_DIR"), "/gl_bindings.rs"));
}
/// Internal trait for objects that are OpenGL objects.
trait GlObject {
    /// The type of the object's name (e.g. `GLuint` for most objects).
    type Id;

    /// Returns the id of the object.
    fn get_id(&self) -> Self::Id;
}
/// Handle to a shader or a program.
// TODO: Handle(null()) is equal to Id(0)
#[derive(PartialEq, Eq, Copy, Clone, Debug, Hash)]
enum Handle {
    /// A plain object name as used by the core OpenGL shader API.
    Id(gl::types::GLuint),
    /// A legacy handle as used by the old ARB_shader_objects API.
    Handle(gl::types::GLhandleARB),
}

// SAFETY(review): presumably `GLhandleARB` is a pointer-like type on some
// platforms, which is why `Send` isn't automatic; the handle is only used as an
// opaque id, never dereferenced — confirm on all supported targets.
unsafe impl Send for Handle {}
/// Internal trait for enums that can be turned into GLenum.
trait ToGlEnum {
    /// Returns the `GLenum` constant equivalent to this value.
    fn to_glenum(&self) -> gl::types::GLenum;
}
/// Function that the GPU will use for blending.
///
/// Blending happens at the end of the rendering process, when the GPU wants to write the
/// pixels over pixels that already exist in the framebuffer. The blending function allows
/// you to choose how it should merge the two.
///
/// If you want to add transparent objects one over another, the usual value
/// is `Addition { source: SourceAlpha, destination: OneMinusSourceAlpha }`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BlendingFunction {
    /// Simply overwrite the destination pixel with the source pixel.
    ///
    /// The alpha channels are simply ignored. This is the default mode.
    ///
    /// For example writing `(0.5, 0.9, 0.4, 0.2)` over `(0.9, 0.1, 0.4, 0.3)` will
    /// result in `(0.5, 0.9, 0.4, 0.2)`.
    AlwaysReplace,

    /// For each individual component (red, green, blue, and alpha), the minimum value is chosen
    /// between the source and the destination.
    ///
    /// For example writing `(0.5, 0.9, 0.4, 0.2)` over `(0.9, 0.1, 0.4, 0.3)` will
    /// result in `(0.5, 0.1, 0.4, 0.2)`.
    Min,

    /// For each individual component (red, green, blue, and alpha), the maximum value is chosen
    /// between the source and the destination.
    ///
    /// For example writing `(0.5, 0.9, 0.4, 0.2)` over `(0.9, 0.1, 0.4, 0.3)` will
    /// result in `(0.9, 0.9, 0.4, 0.3)`.
    Max,

    /// For each individual component (red, green, blue, and alpha), a weighted addition
    /// between the source and the destination.
    ///
    /// The result is equal to `source_component * source_factor + dest_component * dest_factor`,
    /// where `source_factor` and `dest_factor` are the values of `source` and `destination` of
    /// this enum.
    Addition {
        /// The factor to apply to the source pixel.
        source: LinearBlendingFactor,
        /// The factor to apply to the destination pixel.
        destination: LinearBlendingFactor,
    },

    /// For each individual component (red, green, blue, and alpha), a weighted subtraction
    /// of the destination from the source.
    ///
    /// The result is equal to `source_component * source_factor - dest_component * dest_factor`,
    /// where `source_factor` and `dest_factor` are the values of `source` and `destination` of
    /// this enum.
    Subtraction {
        /// The factor to apply to the source pixel.
        source: LinearBlendingFactor,
        /// The factor to apply to the destination pixel.
        destination: LinearBlendingFactor,
    },

    /// For each individual component (red, green, blue, and alpha), a weighted subtraction
    /// of the source from the destination.
    ///
    /// The result is equal to `-source_component * source_factor + dest_component * dest_factor`,
    /// where `source_factor` and `dest_factor` are the values of `source` and `destination` of
    /// this enum.
    ReverseSubtraction {
        /// The factor to apply to the source pixel.
        source: LinearBlendingFactor,
        /// The factor to apply to the destination pixel.
        destination: LinearBlendingFactor,
    },
}
/// Indicates which value to multiply each component with.
///
/// Used by the weighted variants of `BlendingFunction` (`Addition`, `Subtraction`,
/// `ReverseSubtraction`) as the per-pixel factors.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LinearBlendingFactor {
    /// Multiply the source or destination component by zero, which always
    /// gives `0.0`.
    Zero,

    /// Multiply the source or destination component by one, which always
    /// gives you the original value.
    One,

    /// Multiply the source or destination component by its corresponding value
    /// in the source.
    ///
    /// If you apply this to the source components, you get the values squared.
    SourceColor,

    /// Equivalent to `1 - SourceColor`.
    OneMinusSourceColor,

    /// Multiply the source or destination component by its corresponding value
    /// in the destination.
    ///
    /// If you apply this to the destination components, you get the values squared.
    DestinationColor,

    /// Equivalent to `1 - DestinationColor`.
    OneMinusDestinationColor,

    /// Multiply the source or destination component by the alpha value of the source.
    SourceAlpha,

    /// Multiply the source or destination component by `1.0` minus the alpha value of the source.
    OneMinusSourceAlpha,

    /// Multiply the source or destination component by the alpha value of the destination.
    DestinationAlpha,

    /// Multiply the source or destination component by `1.0` minus the alpha value of the
    /// destination.
    OneMinusDestinationAlpha,
}
impl ToGlEnum for LinearBlendingFactor {
fn to_glenum(&self) -> gl::types::GLenum {
match *self {
LinearBlendingFactor::Zero => gl::ZERO,
LinearBlendingFactor::One => gl::ONE,
LinearBlendingFactor::SourceColor => gl::SRC_COLOR,
LinearBlendingFactor::OneMinusSourceColor => gl::ONE_MINUS_SRC_COLOR,
LinearBlendingFactor::DestinationColor => gl::DST_COLOR,
LinearBlendingFactor::OneMinusDestinationColor => gl::ONE_MINUS_DST_COLOR,
LinearBlendingFactor::SourceAlpha => gl::SRC_ALPHA,
LinearBlendingFactor::OneMinusSourceAlpha => gl::ONE_MINUS_SRC_ALPHA,
LinearBlendingFactor::DestinationAlpha => gl::DST_ALPHA,
LinearBlendingFactor::OneMinusDestinationAlpha => gl::ONE_MINUS_DST_ALPHA,
}
}
}
/// Describes how triangles should be filtered before the fragment processing. Backface culling
/// is purely an optimization. If you don't know what this does, just use `CullingDisabled`.
///
/// # Backface culling
///
/// After the vertex shader stage, the GPU knows the 2D coordinates of each vertex of
/// each triangle.
///
/// For a given triangle, there are only two situations:
///
/// - The vertices are arranged in a clockwise direction on the screen.
/// - The vertices are arranged in a counterclockwise direction on the screen.
///
/// If you wish so, you can ask the GPU to discard all the primitives that belong to one
/// of these two categories.
///
/// ## Example
///
/// The vertices of this triangle are counter-clock-wise.
///
/// <svg width="556.84381" height="509.69049" version="1.1">
/// <g transform="translate(-95.156215,-320.37201)">
/// <path style="fill:none;stroke:#000000;stroke-width:4;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" d="M 324.25897,418.99654 539.42145,726.08292 212.13204,741.23521 z" />
/// <text style="font-size:40px;font-style:normal;font-weight:normal;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;font-family:Sans" x="296.98483" y="400.81378"><tspan x="296.98483" y="400.81378">1</tspan></text>
/// <text style="font-size:40px;font-style:normal;font-weight:normal;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;font-family:Sans" x="175.22902" y="774.8031"><tspan x="175.22902" y="774.8031">2</tspan></text>
/// <text style="font-size:40px;font-style:normal;font-weight:normal;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;font-family:Sans" x="555.58386" y="748.30627"><tspan x="555.58386" y="748.30627">3</tspan></text>
/// </g>
/// </svg>
///
/// # Usage
///
/// The trick is that if you make a 180° rotation of a shape, all triangles that were
/// clockwise become counterclockwise and vice versa.
///
/// Therefore you can arrange your model so that the triangles that are facing the screen
/// are all either clockwise or counterclockwise, and all the triangles that are *not*
/// facing the screen are the other way round.
///
/// By doing so you can use backface culling to discard all the triangles that are not
/// facing the screen, and increase your framerate.
///
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BackfaceCullingMode {
    /// All triangles are always drawn.
    CullingDisabled,

    /// Triangles whose vertices are counterclockwise won't be drawn.
    CullCounterClockWise,

    /// Triangles whose vertices are clockwise won't be drawn.
    CullClockWise
}
/// The function that the GPU will use to determine whether to write over an existing pixel
/// on the target.
///
/// # Depth buffers
///
/// After the fragment shader has been run, the GPU maps the output Z coordinates to the depth
/// range (which you can specify in the draw parameters) in order to obtain the depth value
/// in window coordinates. This depth value is always between `0.0` and `1.0`.
///
/// In addition to the buffer where pixel colors are stored, you can also have a buffer
/// which contains the depth value of each pixel. Whenever the GPU tries to write a pixel,
/// it will first compare the depth value of the pixel to be written with the depth value that
/// is stored at this location.
///
/// If you don't have a depth buffer available, you can only pass `Overwrite`. Glium detects if
/// you pass any other value and reports an error.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DepthFunction {
    /// Never replace the target pixel.
    ///
    /// This option doesn't really make sense, but is here for completeness.
    Ignore,

    /// Always replace the target pixel.
    ///
    /// This is the default mode.
    Overwrite,

    /// Replace if the z-value of the source is equal to the destination.
    IfEqual,

    /// Replace if the z-value of the source is different than the destination.
    IfNotEqual,

    /// Replace if the z-value of the source is more than the destination.
    IfMore,

    /// Replace if the z-value of the source is more than, or equal to the destination.
    IfMoreOrEqual,

    /// Replace if the z-value of the source is less than the destination.
    IfLess,

    /// Replace if the z-value of the source is less than, or equal to the destination.
    IfLessOrEqual
}
impl DepthFunction {
    /// Returns true if the function requires a depth buffer to be used.
    ///
    /// `Overwrite` is the only variant that never compares against a stored depth
    /// value, so it is the only one usable without a depth buffer.
    pub fn requires_depth_buffer(&self) -> bool {
        match *self {
            DepthFunction::Overwrite => false,

            DepthFunction::Ignore |
            DepthFunction::IfEqual |
            DepthFunction::IfNotEqual |
            DepthFunction::IfMore |
            DepthFunction::IfMoreOrEqual |
            DepthFunction::IfLess |
            DepthFunction::IfLessOrEqual => true,
        }
    }
}
impl ToGlEnum for DepthFunction {
fn to_glenum(&self) -> gl::types::GLenum {
match *self {
DepthFunction::Ignore => gl::NEVER,
DepthFunction::Overwrite => gl::ALWAYS,
DepthFunction::IfEqual => gl::EQUAL,
DepthFunction::IfNotEqual => gl::NOTEQUAL,
DepthFunction::IfMore => gl::GREATER,
DepthFunction::IfMoreOrEqual => gl::GEQUAL,
DepthFunction::IfLess => gl::LESS,
DepthFunction::IfLessOrEqual => gl::LEQUAL,
}
}
}
/// Defines how the device should render polygons.
///
/// The usual value is `Fill`, which fills the content of polygon with the color. However other
/// values are sometimes useful, especially for debugging purposes.
///
/// # Example
///
/// The same triangle drawn respectively with `Fill`, `Line` and `Point` (barely visible).
///
/// <svg width="890.26135" height="282.59375" version="1.1">
/// <g transform="translate(0,-769.9375)">
/// <path style="fill:#ff0000;fill-opacity:1;stroke:none" d="M 124.24877,771.03979 258.59906,1051.8622 0,1003.3749 z" />
/// <path style="fill:none;fill-opacity:1;stroke:#ff0000;stroke-opacity:1" d="M 444.46713,771.03979 578.81742,1051.8622 320.21836,1003.3749 z" />
/// <path style="fill:#ff0000;fill-opacity:1;stroke:none" d="m 814.91074,385.7662 c 0,0.0185 -0.015,0.0335 -0.0335,0.0335 -0.0185,0 -0.0335,-0.015 -0.0335,-0.0335 0,-0.0185 0.015,-0.0335 0.0335,-0.0335 0.0185,0 0.0335,0.015 0.0335,0.0335 z" transform="matrix(18.833333,0,0,18.833333,-14715.306,-6262.0056)" />
/// <path style="fill:#ff0000;fill-opacity:1;stroke:none" d="m 814.91074,385.7662 c 0,0.0185 -0.015,0.0335 -0.0335,0.0335 -0.0185,0 -0.0335,-0.015 -0.0335,-0.0335 0,-0.0185 0.015,-0.0335 0.0335,-0.0335 0.0185,0 0.0335,0.015 0.0335,0.0335 z" transform="matrix(18.833333,0,0,18.833333,-14591.26,-6493.994)" />
/// <path style="fill:#ff0000;fill-opacity:1;stroke:none" d="m 814.91074,385.7662 c 0,0.0185 -0.015,0.0335 -0.0335,0.0335 -0.0185,0 -0.0335,-0.015 -0.0335,-0.0335 0,-0.0185 0.015,-0.0335 0.0335,-0.0335 0.0185,0 0.0335,0.015 0.0335,0.0335 z" transform="matrix(18.833333,0,0,18.833333,-14457.224,-6213.6135)" />
/// </g>
/// </svg>
///
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PolygonMode {
    /// Only draw a single point at each vertex.
    ///
    /// All attributes that apply to points are used when using this mode.
    Point,

    /// Only draw a line in the boundaries of each polygon.
    ///
    /// All attributes that apply to lines (`line_width`) are used when using this mode.
    Line,

    /// Fill the content of the polygon. This is the default mode.
    Fill,
}
impl ToGlEnum for PolygonMode {
fn to_glenum(&self) -> gl::types::GLenum {
match *self {
PolygonMode::Point => gl::POINT,
PolygonMode::Line => gl::LINE,
PolygonMode::Fill => gl::FILL,
}
}
}
/// Represents the parameters to use when drawing.
///
/// Example:
///
/// ```
/// let params = glium::DrawParameters {
///     depth_function: glium::DepthFunction::IfLess,
///     .. std::default::Default::default()
/// };
/// ```
///
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct DrawParameters {
    /// The function that the GPU will use to determine whether to write over an existing pixel
    /// on the target.
    ///
    /// See the `DepthFunction` documentation for more details.
    ///
    /// The default is `Overwrite`.
    pub depth_function: DepthFunction,

    /// The range of possible Z values in surface coordinates.
    ///
    /// Just like OpenGL turns X and Y coordinates between `-1.0` and `1.0` into surface
    /// coordinates, it will also map your Z coordinates to a certain range which you can
    /// specify here.
    ///
    /// The two values must be between `0.0` and `1.0`, anything outside this range will result
    /// in a panic. By default the depth range is `(0.0, 1.0)`.
    ///
    /// The first value of the tuple must be the "near" value, where `-1.0` will be mapped.
    /// The second value must be the "far" value, where `1.0` will be mapped.
    /// It is possible for the "near" value to be greater than the "far" value.
    pub depth_range: (f32, f32),

    /// The function that the GPU will use to merge the existing pixel with the pixel that is
    /// being written.
    ///
    /// `None` means "don't care" (usually when you know that the alpha is always 1).
    pub blending_function: Option<BlendingFunction>,

    /// Width in pixels of the lines to draw when drawing lines.
    ///
    /// `None` means "don't care". Use this when you don't draw lines.
    pub line_width: Option<f32>,

    /// Whether or not the GPU should filter out some faces.
    ///
    /// After the vertex shader stage, the GPU will try to remove the faces that aren't facing
    /// the camera.
    ///
    /// See the `BackfaceCullingMode` documentation for more information.
    pub backface_culling: BackfaceCullingMode,

    /// How to render polygons. The default value is `Fill`.
    ///
    /// See the documentation of `PolygonMode` for more information.
    pub polygon_mode: PolygonMode,

    /// Whether multisample antialiasing (MSAA) should be used. Default value is `true`.
    ///
    /// Note that you will need to set the appropriate option when creating the window.
    /// The recommended way to do is to leave this to `true`, and adjust the option when
    /// creating the window.
    pub multisampling: bool,

    /// Whether dithering is activated. Default value is `true`.
    ///
    /// Dithering will smoothen the transition between colors in your color buffer.
    pub dithering: bool,

    /// The viewport to use when drawing.
    ///
    /// The X and Y positions of your vertices are mapped to the viewport so that `(-1, -1)`
    /// corresponds to the lower-left hand corner and `(1, 1)` corresponds to the top-right
    /// hand corner. Any pixel outside of the viewport is discarded.
    ///
    /// You can specify a viewport greater than the target if you want to stretch the image.
    ///
    /// `None` means "use the whole surface".
    pub viewport: Option<Rect>,

    /// If specified, only pixels in this rect will be displayed. Default is `None`.
    ///
    /// This is different from a viewport. The image will stretch to fill the viewport, but
    /// not the scissor box.
    pub scissor: Option<Rect>,
}
impl std::default::Default for DrawParameters {
    /// Builds the default draw parameters: no depth test (`Overwrite`), depth range
    /// `(0.0, 1.0)`, blending disabled (`AlwaysReplace`), no line width override,
    /// no culling, filled polygons, multisampling and dithering enabled,
    /// full-surface viewport, and no scissor box.
    fn default() -> DrawParameters {
        DrawParameters {
            depth_function: DepthFunction::Overwrite,
            depth_range: (0.0, 1.0),
            blending_function: Some(BlendingFunction::AlwaysReplace),
            line_width: None,
            backface_culling: BackfaceCullingMode::CullingDisabled,
            polygon_mode: PolygonMode::Fill,
            multisampling: true,
            dithering: true,
            viewport: None,
            scissor: None,
        }
    }
}
impl DrawParameters {
    /// Checks the draw parameters for consistency.
    ///
    /// Despite the original description, this does not panic: it returns
    /// `Err(DrawError::InvalidDepthRange)` when either bound of `depth_range`
    /// falls outside `[0.0, 1.0]`, and `Ok(())` otherwise.
    fn validate(&self) -> Result<(), DrawError> {
        if self.depth_range.0 < 0.0 || self.depth_range.0 > 1.0 ||
           self.depth_range.1 < 0.0 || self.depth_range.1 > 1.0
        {
            return Err(DrawError::InvalidDepthRange);
        }

        Ok(())
    }
/// Synchronizes the parameters with the current ctxt.state.
fn sync(&self, ctxt: &mut context::CommandContext, surface_dimensions: (u32, u32)) {
// depth function
match self.depth_function {
DepthFunction::Overwrite => unsafe {
if ctxt.state.enabled_depth_test {
ctxt.gl.Disable(gl::DEPTH_TEST);
ctxt.state.enabled_depth_test = false;
}
},
depth_function => unsafe {
let depth_function = depth_function.to_glenum();
if ctxt.state.depth_func != depth_function {
ctxt.gl.DepthFunc(depth_function);
ctxt.state.depth_func = depth_function;
}
if !ctxt.state.enabled_depth_test {
ctxt.gl.Enable(gl::DEPTH_TEST);
ctxt.state.enabled_depth_test = true;
}
}
}
// depth range
if self.depth_range != ctxt.state.depth_range {
unsafe {
ctxt.gl.DepthRange(self.depth_range.0 as f64, self.depth_range.1 as f64);
}
ctxt.state.depth_range = self.depth_range;
}
// blending function
let blend_factors = match self.blending_function {
Some(BlendingFunction::AlwaysReplace) => unsafe {
if ctxt.state.enabled_blend {
ctxt.gl.Disable(gl::BLEND);
ctxt.state.enabled_blend = false;
}
None
},
Some(BlendingFunction::Min) => unsafe {
if ctxt.state.blend_equation != gl::MIN {
ctxt.gl.BlendEquation(gl::MIN);
ctxt.state.blend_equation = gl::MIN;
}
if !ctxt.state.enabled_blend {
ctxt.gl.Enable(gl::BLEND);
ctxt.state.enabled_blend = true;
}
None
},
Some(BlendingFunction::Max) => unsafe {
if ctxt.state.blend_equation != gl::MAX {
ctxt.gl.BlendEquation(gl::MAX);
ctxt.state.blend_equation = gl::MAX;
}
if !ctxt.state.enabled_blend {
ctxt.gl.Enable(gl::BLEND);
ctxt.state.enabled_blend = true;
}
None
},
Some(BlendingFunction::Addition { source, destination }) => unsafe {
if ctxt.state.blend_equation != gl::FUNC_ADD {
ctxt.gl.BlendEquation(gl::FUNC_ADD);
ctxt.state.blend_equation = gl::FUNC_ADD;
}
if !ctxt.state.enabled_blend {
ctxt.gl.Enable(gl::BLEND);
ctxt.state.enabled_blend = true;
}
Some((source, destination))
},
Some(BlendingFunction::Subtraction { source, destination }) => unsafe {
if ctxt.state.blend_equation != gl::FUNC_SUBTRACT {
ctxt.gl.BlendEquation(gl::FUNC_SUBTRACT);
ctxt.state.blend_equation = gl::FUNC_SUBTRACT;
}
if !ctxt.state.enabled_blend {
ctxt.gl.Enable(gl::BLEND);
ctxt.state.enabled_blend = true;
}
Some((source, destination))
},
Some(BlendingFunction::ReverseSubtraction { source, destination }) => unsafe {
if ctxt.state.blend_equation != gl::FUNC_REVERSE_SUBTRACT {
ctxt.gl.BlendEquation(gl::FUNC_REVERSE_SUBTRACT);
ctxt.state.blend_equation = gl::FUNC_REVERSE_SUBTRACT;
}
if !ctxt.state.enabled_blend {
ctxt.gl.Enable(gl::BLEND);
ctxt.state.enabled_blend = true;
}
Some((source, destination))
},
_ => None
};
if let Some((source, destination)) = blend_factors {
let source = source.to_glenum();
let destination = destination.to_glenum();
if ctxt.state.blend_func != (source, destination) {
unsafe { ctxt.gl.BlendFunc(source, destination) };
ctxt.state.blend_func = (source, destination);
}
};
// line width
if let Some(line_width) = self.line_width {
if ctxt.state.line_width != line_width {
unsafe {
ctxt.gl.LineWidth(line_width);
ctxt.state.line_width = line_width;
}
}
}
// back-face culling
// note: we never change the value of `glFrontFace`, whose default is GL_CCW
// that's why `CullClockWise` uses `GL_BACK` for example
match self.backface_culling {
BackfaceCullingMode::CullingDisabled => unsafe {
if ctxt.state.enabled_cull_face {
ctxt.gl.Disable(gl::CULL_FACE);
ctxt.state.enabled_cull_face = false;
}
},
BackfaceCullingMode::CullCounterClockWise => unsafe {
if !ctxt.state.enabled_cull_face {
ctxt.gl.Enable(gl::CULL_FACE);
ctxt.state.enabled_cull_face = true;
}
if ctxt.state.cull_face != gl::FRONT {
ctxt.gl.CullFace(gl::FRONT);
ctxt.state.cull_face = gl::FRONT;
}
},
BackfaceCullingMode::CullClockWise => unsafe {
if !ctxt.state.enabled_cull_face {
ctxt.gl.Enable(gl::CULL_FACE);
ctxt.state.enabled_cull_face = true;
}
if ctxt.state.cull_face != gl::BACK {
ctxt.gl.CullFace(gl::BACK);
ctxt.state.cull_face = gl::BACK;
}
},
}
// polygon mode
unsafe {
let polygon_mode = self.polygon_mode.to_glenum();
if ctxt.state.polygon_mode != polygon_mode {
ctxt.gl.PolygonMode(gl::FRONT_AND_BACK, polygon_mode);
ctxt.state.polygon_mode = polygon_mode;
}
}
// multisampling
if ctxt.state.enabled_multisample != self.multisampling {
unsafe {
if self.multisampling {
ctxt.gl.Enable(gl::MULTISAMPLE);
ctxt.state.enabled_multisample = true;
} else {
ctxt.gl.Disable(gl::MULTISAMPLE);
ctxt.state.enabled_multisample = false;
}
}
}
// dithering
if ctxt.state.enabled_dither != self.dithering {
unsafe {
if self.dithering {
ctxt.gl.Enable(gl::DITHER);
ctxt.state.enabled_dither = true;
} else {
ctxt.gl.Disable(gl::DITHER);
ctxt.state.enabled_dither = false;
}
}
}
// viewport
if let Some(viewport) = self.viewport {
assert!(viewport.width <= ctxt.capabilities.max_viewport_dims.0 as u32,
"Viewport dimensions are too large");
assert!(viewport.height <= ctxt.capabilities.max_viewport_dims.1 as u32,
"Viewport dimensions are too large");
let viewport = (viewport.left as gl::types::GLint, viewport.bottom as gl::types::GLint,
viewport.width as gl::types::GLsizei,
viewport.height as gl::types::GLsizei);
if ctxt.state.viewport != viewport {
unsafe { ctxt.gl.Viewport(viewport.0, viewport.1, viewport.2, viewport.3); }
ctxt.state.viewport = viewport;
}
} else {
assert!(surface_dimensions.0 <= ctxt.capabilities.max_viewport_dims.0 as u32,
"Viewport dimensions are too large");
assert!(surface_dimensions.1 <= ctxt.capabilities.max_viewport_dims.1 as u32,
"Viewport dimensions are too large");
let viewport = (0, 0, surface_dimensions.0 as gl::types::GLsizei,
surface_dimensions.1 as gl::types::GLsizei);
if ctxt.state.viewport != viewport {
unsafe { ctxt.gl.Viewport(viewport.0, viewport.1, viewport.2, viewport.3); }
ctxt.state.viewport = viewport;
}
}
// scissor
if let Some(scissor) = self.scissor {
let scissor = (scissor.left as gl::types::GLint, scissor.bottom as gl::types::GLint,
scissor.width as gl::types::GLsizei,
scissor.height as gl::types::GLsizei);
unsafe {
if ctxt.state.scissor != scissor {
ctxt.gl.Scissor(scissor.0, scissor.1, scissor.2, scissor.3);
ctxt.state.scissor = scissor;
}
if !ctxt.state.enabled_scissor_test {
ctxt.gl.Enable(gl::SCISSOR_TEST);
ctxt.state.enabled_scissor_test = true;
}
}
} else {
unsafe {
if ctxt.state.enabled_scissor_test {
ctxt.gl.Disable(gl::SCISSOR_TEST);
ctxt.state.enabled_scissor_test = false;
}
}
}
}
}
/// Area of a surface in pixels.
///
/// In the OpenGL ecosystem, the (0,0) coordinate is at the bottom-left hand corner of the images,
/// with `y` increasing upwards. All fields are expressed in whole pixels.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub struct Rect {
    /// Number of pixels between the left border of the surface and the left border of
    /// the rectangle.
    pub left: u32,
    /// Number of pixels between the bottom border of the surface and the bottom border
    /// of the rectangle.
    pub bottom: u32,
    /// Width of the area in pixels.
    pub width: u32,
    /// Height of the area in pixels.
    pub height: u32,
}
/// Object that can be drawn upon.
///
/// # What does the GPU do when you draw?
///
/// This is a summary of everything that happens when you call the `draw` function. Note that
/// this is not necessarily *exactly* what happens. Backends are free to do whatever they want
/// as long as it always matches the expected outcome.
///
/// ## Step 1: Vertex shader
///
/// For each vertex in the vertices source, the GPU invokes the vertex shader that is part
/// of the program, and passes the corresponding vertex's attributes to it.
///
/// The vertex shader *must* write the special `gl_Position` variable in order to indicate
/// the four-dimensional coordinates of the vertex. In order to understand what these coordinates
/// mean, see the "vertex post-processing" step below.
///
/// In addition to the position of the vertex, the vertex shader can also specify the values of
/// various vertex attributes.
///
/// ## Step 2: Tessellation (optional)
///
/// It is possible to use tessellation shaders, but glium does not support them yet.
///
/// ## Step 3: Geometry shader (optional)
///
/// If you specify a geometry shader, then the GPU will invoke it once for each primitive.
///
/// The geometry shader can output multiple primitives.
///
/// ## Step 4: Transform feedback (optional)
///
/// Transform feedback is not supported by glium for the moment.
///
/// ## Step 5: Vertex post-processing
///
/// The vertex shader step told the GPU what the coordinates of each vertex are, but these
/// coordinates have four dimensions, named `x`, `y`, `z` and `w`.
///
/// The GPU then computes the position of the vertex on the 2D surface you are drawing on, and
/// the depth of this vertex:
///
/// ```notrust
/// window_x = viewport_left + viewport_width * ((x / w) + 1.0) / 2.0
/// window_y = viewport_bottom + viewport_height * ((y / w) + 1.0) / 2.0
/// depth = depth_near + (depth_far - depth_near) * ((z / w) + 1.0) / 2.0
/// ```
///
/// *`viewport_left`, `viewport_width`, `viewport_bottom` and `viewport_height` correspond to
/// the `viewport` member of the draw parameters, and `depth_near` and `depth_far` correspond
/// to the `depth_range` member*.
///
/// This means that if `x / w`, `y / w` or `z / w` are equal to `-1.0`, then the result will be
/// `viewport_left`, `viewport_bottom` or `depth_near`. If they are equal to `1.0`, the result
/// will be `viewport_left + viewport_width` (the right of the viewport),
/// `viewport_bottom + viewport_height` (the top of the viewport) or `depth_far`.
///
/// For example if you want to draw a rectangle that covers the whole screen, it should be made
/// of four vertices whose coordinates are `(-1.0, -1.0, 0.0, 1.0)` (bottom-left corner),
/// `(-1.0, 1.0, 0.0, 1.0)` (top-left corner), `(1.0, 1.0, 0.0, 1.0)` (top-right corner) and
/// `(1.0, -1.0, 0.0, 1.0)` (bottom-right corner).
///
/// ## Step 6: Primitive assembly
///
/// The next step consists in building the primitives. Triangle strips, triangle fans and line
/// strips are turned into individual triangles or lines.
///
/// Triangle strips obey certain rules for the order of indices. For example the triangle strip
/// `0, 1, 2, 3, 4, 5` does *not* correspond to `0, 1, 2`, `1, 2, 3`, `2, 3, 4`, `3, 4, 5` as you
/// would expect, but to `0, 1, 2`, `1, 3, 2`, `2, 3, 4`, `3, 5, 4` (some indices are reversed).
/// This is important with regards to the face culling step below.
///
/// Then, if you did specify `PrimitiveMode`, it is used. If you specified `Line`, triangles are
/// turned into lines. If you specified `Point`, triangles and lines are turned into points.
///
/// The GPU then looks at the screen coordinates of each primitive, and discards primitives that
/// are entirely outside of the window.
///
/// Note that points whose centers are outside of the viewport are discarded, even if the point
/// width would be big enough for the point to be visible. However this standard behavior is not
/// respected by nVidia drivers, which show the points anyway.
///
/// ## Step 7: Face culling (triangles only)
///
/// This step is purely an optimization step and only concerns triangles.
///
/// If you specify a value for `backface_culling` other than `CullingDisabled`, the GPU will
/// discard triangles depending on the way that the vertices are arranged on the window. You can
/// either discard triangles whose vertices are clockwise or counterclockwise.
///
/// For more information, see the `BackfaceCullingMode` documentation.
///
/// ## Step 8: Rasterization
///
/// Now that the GPU knows where on the window the various triangles, points or lines are, it will
/// determine which pixels of the surface are part of each primitive.
///
/// For points and lines, this step depends on the points width and line width that you specified
/// in the draw parameters.
///
/// <img alt="" src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAnYAAAEsCAYAAABOqf71AAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAAxOAAAMTgF/d4wjAAAAB3RJTUUH3wEIDBAooQVGygAAG4JJREFUeNrt3XuQXGWZx/HvmftMLjOZJJCLuZAEuYgJlIBKFBGQoNwCtQvlemPRVVaLhRUtlHG31KUVd7HEUgvwDy9LqaB7UVxL3dJV8a6sV5DLCkmASAwJyYRkMvfeP96ezJlmksxkumfOe/r7qUqR0wmZp5/z9ulfn/f0e0CSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEmSJEnSDEq64BLgFmBNhPUXgcTdqByKdWxvTuBvb4JvuQsP70Z4VQK3Aic6RiXH9hRtBa6tAz4RaajDg4ry/KEr0rpXFuEmd9+Ed/JHIw11Hn/l8Td7lgIfqQOWuQ8lVdAaWzBhy22BpApa3VD2wH0JfDeCwk8twjmp7Z8l8IOsF12E15cS9chHgluBvgjqvh4YGSt9pbozbRgWJnBVqtebgC9HUPeGBE5OPXRPAg9GMEY2MLZuTd79CXwjgn19AnBx6qHfJfDNCOq+HDhmZLsPvliEnVmvuwXeCjSXNgdLZ3mzrrkI16W2/5TAnRGMkVcAL0m9b3wXuC+2uhvK/vDHBXhP1p9EVxgw6WD3/ZvgvVmv+0Z4WZIKdg3wgffD7gj6fe3IWClCbwxj5EZYSyrYFeGRSOpuTwekInyxAHdHUHdHYrCbqv+9KY4xekWSCnZF+GUk7xsnp4PdZvj0v8EfIuj3m5JUsIthjLwfOgbGBrsnbopjjHw4HZCA/7opghMZ74Obi6m66zyWSpIk5YPBTpIkyWAnSZIkg50kSZIMdpIkSTLYSZIkGewkSZJksJMkSZLBTpIkSQY7SZIkg50kSZIMdpIkSTLYSZIkyWAnSZIkg50kSZLBTpIkSQY7SZIkGewkSZJksJMkSTLYSZIkyWAnSZIkg50kSZIMdpIkSQY7SZIkGewkSZJksJMkSZLBTpIkSQY7SZIkg50kSZIMdpIkSTLYCWgFGm2DJEky2MXveOBioNlWSJIkg13c1gBHAxuBNtshSZIMdnGaCyws/X4+cCkwx7ZIkiSDXXxWl223A5cB82yNJEky2MUd7ABmEc7cHWV7JEmSwS4Ocw4R3lqAS4AltkmSJBnssm/1Yf68EbgIWGmrJEmSwS7uYAdQD5wPHGu7JEky2CmbZhOWOJnofjwXOMm2SZJUu5IuKKa2HwB+FkHdJwEvTm3/Frgv60UX4cJkbFi7E+gf7+9ugs5NEw92B6yAp1fDjgqXfiXhzCCleu+MoNedSfiCyYitwLciqPuMBE5IPfQ94LEIXpPrCQtpA3QXoMPD6+F1wW7CN91J4P+KcG8EZa8CXpnafhj4UQR1nw8sZfRA9g1C/zOtCS6ndOehIgwl8LkIet0EvCG1/TRwTwR1nwqsS23/HLg/grpPA9YeLNgpIx4C9h3h/7soffSSpl9fIXy5R4cPdr14RxlJFVSHwS5z+qcQ6gC2AVtso2ZOgy2wV5JmRLEOSOxDtlRibmAHYf7O1K4Z+mwieyVp+iXlnxa/CXw+gsJfA7wxtf014EsR1P0B4LgDsRrenIxzcu7XcPreClyjtDmEux2vgN80wvAU/ql/JVwzAdADXBVBr1cAH0lt/xb4cAR1vxl4VWr748BPI6j7LYQv8BhWJh/sWku//wFwWwQ1vxS4NvUEftQdjhGZ1gnX1sMLRrZ/AV/pgZ1Zr/tMuKqudPwdhoGn4Lqs19wArUfDLamH/gi8L4Kx/VrC+rAjPl/KRVHVXR7sHinA3Vl/Bl2wuCzYPRhJ3dekg10C/1F47gm6WYR7wlbMz8N1PN840jfcrrEX6w5E0uu1ZcFuWyR1n1UW7H4aSd2vTAU7HeFnsUj2NelgN
wRbPg1fzXrdN4Q3vwPB7lG4/zF4Iut1vxzelNoc/hx8Pes1r4W5F40NdjsjGdsnlwW730RS9ynpul3uJHtWV+HfXAxsTJ0ZkCRJOWSwq41gB7CAsPzHbFssSZLBTtXXRlitpFo6gMsorZslSZIMdqqe1VT/W8qzS+Fuge2WJMlgp+oGu+nQSrjQcpEtlyTJYKfKayN8yWG6NAMXA8ttvSRJBjtV1iqmf7HoBsKagKttvyRJBjtVzuoZHAPnMfbG85IkyWCnI9QKLJnBn58QFphd566QJMlgp6mZiWnY8awHTnd3SJJksNORW5OhWk4FXu4ukSTJYKfJm+lp2PG8kHDfT8eHJEkGO03CMWRjGrbc84HzgXp3kSRJBjtNzJoM17YSuHDYfSRJksFOh/YAtJC9adhySx+G+kF3lyRJBjsd3K9gRQz7YB/wMDDgLpMkyWCn8e0J19dFobcU7vqyeT2gJEky2M2cQaA/+9OwY/QBD0Ij0OkelCTJYKeS3eE/0Z39Kk3HXgoc5V6UJMlgJ2BX3OU3A5cAS92TkiQZ7GraEPBs/E+jEbiQiK4TlCTJYKeK2w0U8/FU6oENwHHuVUmSDHY1aVf+xtA5hNuQSZIkg13tGAL25POpvRw41T0sSZLBrmbkaBp2PKcD693LkiQZ7GrCrvw/xXXAK3EhY0mSDHZ5NgjJntp4qicA5zm+JEky2OXWTmgp1s7TXQ1cADS45yVJMtjlzg5oqbGnvAy4mLCgsSRJMtjlRlN3bQacRYS7VLQ5BCRJMtjlxcpi7X6hYAHh/rJzHAaSJBns8mB1jT//9lK4m+dQkCTJYBezRmC5bWA2sBFYaCskSTLYxWol4b6qglbCNXeLbYUkSQa7GK22BWM0ARcBK2yFJEkGu5g4DTu+BuDVwBpbIUmSwS4WK3CR3kONv1cBJ9oKSZIMdjFwGvbQEuAs4BRbIUmSwS7LGvE6sol6KfAS2yBJ0pErnyJ8Xlc4e5J15ddlLc9i3b+BpQ/ACakUPeaWYktg9SD0RdDvJPWb+pVwbJV+zrEL4cXnwu/qYKq31V1Vtt0ZydheUrZ9YqR1a/IWRbKvx1w6UQ9HvxnWR3AQ6yh7E1m5CGZFUHdd+vevDx+CM63xuX2dG8nYLr8efk2MdSddU38D1UE8BuyyDZPWSVgfJrEVseothGVtdBhdsJ/au4e0pCpyKrZKhoFu23BEngEeLfVQUWqyBfZKksEuV7oNJlPu3x+BIVsRI2cBJvcZUJIqpvwauz2EEyZZN5cwY5fOAZma9dwBrYNl/a2H9iT12GDodTGCQdLJ6MxocXCaxsgu4A8wfDz0NE6+T02Mve5rP/DnCMb2fGBOavtpYF9kde/30DphvYRb7QHsDYeOzJtF6raAPdDTC7sjeNOY3wDNI9uPQvcQDGa97jXQWVc6/g5A8ckIxkgdJCtgwch2H/T3hWNZprXA3Kaxx99nSrko6+YR7sc+brD7bAGuy/oz6Ao1fiz10G0FeG/GAvNfE74Ve8BZcH1L6qL+n8CHeiJ4E9wAtyal5zIMvd+Bf5zOn39PyHj3TCbgdMFa4Leph+4twPkRjO3bgKtTD11TgLsjqPt24G2lTU+0Tly6V/9egCsj2NdXAHelgt2374APZr3u6+FTDXDGyPZP4VNbYGvW634P3ELpmtVhGPwivDOCT3ltV8MdqZMYD34CLs963dfA9U3w1tRD/1SAWyN4Td4M3JAK1qqC5eWhTlP+NHJZ+hOJJEl6LoNddbgoceXNAS4NHwYlSZLBbnrUE1brUOW1ARuBRbZCkiSD3XRwGra6moGLgGW2QpIkg121rbEFVdcIXMBz7y4hSZLBThXjNOz0jt0NwPG2QpIkg101LMNp2OmUAGcD62yFJEkGu0pzGnZmrAdOsw2SJIOdKsVp2Jl1GvAy2yBJMtipEpbhDb1n2lrgHEZvfyZJUk1psAUV46LE2XAc0NgH25rthSSpxnjGrnJ9XGkbMmPVV+HMY
fsgSTLY6QgsIyycq4zYB0c/gnejlyQZ7DR5TsNmM9zxMDBgKyRJBjtNoofH2IZs2l8Kd/u9nlSSZLDTBDwPp2EzrQ/4HRwFzLMbkiSDnQ7FadgI9IczdpcSAp4kSQY7jds/b0QfjxbgEmCprZAkGexUzmnY+DQCF+LyNJIkg53KeLYuTvXA+cDzbYUkyWCnkd4Z7OLef+cCJ9kKSZLBTksJ12wpbmcCL7INkiSDXW3z27D58WLgDNsgSTLY1aYEp2Hz5mTgrNK+lSTJYFdDnIbNpxOBV/m6kCQZ7GqL07D5tQZ4Dd6CTJJksKsJTsPm33LgIqDJVkiSDHb5tgRotQ25txjY6L6WJBns8s1p2NqxgHB/2dm2QpJksMsfp2FrTwdwWem/kiQZ7HJkMdBmG2rObMKZuwW2QpJksMuPNbagZrUSrrlbbCskSQa7+DkNqybCt2WX2wpJksEubotwGlZhfbvX4JdoJEkGu6g5Dav06+Y8wp0qJEky2EXIaVilJYR7y55sKyRJBru4LAZm2QaN4wzgxbZBkmSwi4fXU+lQXgScaRskSQY7g53y4STgXF9TkiSDXbYtwmlYTczzgfOBelshSZoJSRcUU9vdwI4I6m5n7F0AdgM7q/GDNkPLNmiuxL/VAO0JNI5sD8LO4tj+Z1IjzCd8WQCgOFClXld0YEN9A8wb2S5C/yDsmY6fPRcGj4Oe+iPbtwvDP3HAn4G9Ebwm03XvKYTXqA6jKxxzR/r2LLA9grJnlT7wAjAM+4bhmawXXQ8LE2gZ2e6DXUUYzHrdLeG9Likdx4r9cYyRpBmOSh9/h2Fb1ouug45k7PF3RykXZd18Ure9LA92KvN7oN82aJLagGNDmK9F/YUKfRiqgWDXT+rDniRVIKDqYPYZ6nSEeoCHgYHafPpORU9cYgskVdJ4JxRiOYOXVLvuXRHWHGuv81h3L/AQ4cK75iOvO8Ze93ponbC+suNwdPu7GH5lvu66suPBUCT9rn9u3VGMkfqyMTIcyRgZ5+BbjODFmBwq2H28ANdl/Ul0hRo/lnro5gK8two/6g3AnEr9Y2fB9S2phY7vhXf3wP6s93sD3DpybeAw7P9veFfWa14MS9fBjSPbPfDgvfDJGSqnB7iHCVyH1AW3AVenHnptAe6O4DV5O/C20mbmr1vKkHSvPl+AKyPY11cAd41sb4HvfiGM20y7Bt43N7Wg+JfgQ1tga9brfg/cUg+tpU9MAx+N4D16HrS+HW4Z2d4Gj34G3p/1ut8Aly8P9wMfqfvDn4HPRTC23zUX/iYVUHUQR1Uy1KmmtQGXAkfbCklSNRnsDs57w6qSmoGLgefZCkmSwW76uSixKq0RuAA4xlZIkgx208dpWFVLPWER4+NshSTJYDc9PFunakqAc4AX2gpJksHOYKd8eDlwqm2QJBnsqqf8lk5SNZ0OrLcNkiSDXXV4tk7TbR1wNt6FQJI0RQ22wGCnTDgeaByGxE9bkqQj5XvIWAuAdtugmfpQ8StYMWwfJEkGu8q8sdoCzaRumPUIB+5jKUmSwW4KvNuEZtw+4GFgwFZIkgx2R8xpWGXG/lK42wUtdkOSZLCbvFW2QFnSB/wwLIcyz25Ikgx2k+M0rDJnIJyxu5SwvqIkSQa7CZgPdNgGZVQLcAmwxFZIkgx2h+e3YZV1TcCFwApbIUky2BnsFL8G4NXAsbZCkmSwG18nXpyuuF6z5wIvsBWSJIPdc3m2TrFJgFcAp9gKSZLBzmCnfHhp6ZckSQY7whRsp8NAETuFcPYusRWSpFoPdp6tUx68gHDdnWfgJclgV9NclFh5cSzhG7MNtkKSDHa1qAOnYZUvKwhr3TXZCkky2NUaz9Ypj5YQ7lLRYiskyWBXS7y+Tnm1kHB/2dm2QpIMdrWgg3B/WCmv5pXCXbutkCSDXd55tk61YE4p3PkhRpIMdgY7KQfagI3AIlshSQa7PGoHFrjrVUOagYuAZbZCkgx2eePZOtWiRuACx78kG
ewMdlJ+Xu/nAcfbCkky2OXBXMJSEFKtSoCzgXW2QpIMdrHzbJ0UrAdOtw2SZLAz2Enx2wXUE75YIUnKiVq6Yfgc4Ch3uWpUEXgK2ARsBrptiSQZ7GLmvWFVa/qBJ0phbgvQZ0skyWCXF07DqhY8SzgjtxnYCgzbEkky2OWN07DKs+2lILcJ2Gk7JMlgl3eerVOeDBGmWDeXfvXYEkkSQNIVLqoeUYyp9onW/RCwL3s1x9Rv657hsd1AuBdeB2Exxrps1723EM6S6zC6YA9je+Vry5qtO5JskdW6Gw6zI4h0AB3Qn51QN6m6Y+23dVdOSwhySQcwK64+u4TKxLX62rJm67bmSsr9VOwu3zgU0RFkdghztJuOaoVfbpFU1WD3LPBMBHXPBealtruB3eP9xR3QNhgWYp1x9TA3SfV8MOTOYgSDZF7qk0txMIK8nEB9fchHoWgYGArjO1PqodgOg/NgsAOGGqCzlO9GPE0c19B1MjqluN9D64T1Ak2lMdozHMHxN4HWOpifegJ9/bA363W3wdwGaEy92e0Oh+Fsmw3zk9LxdxiKPbAjgjGSzIIFI9tDMDAYQd31MKdh7PF3F+FyiaybV8pF4wa7zxTguqw/g65Q48dSD91WgPeO/5rgjVmp+yy4vgVWjWz/BAo9EbwJboBbk9IBcRh6vwP/kPWaF8PSdXAjo0njj/fCJzNS3h5Gv8X6FKmzNl1wG3B16u9eU4C7I3hN3g68bTRHa4IO9GoAvv4vcE3WC347bJwHnx7ZfgJ+8WX4bNbrfge8swNOGtn+Bvzzo+FLSJn2HrijHtpKJwMGPj76OsushTDrrXDnyPY+eOgTGXovPsTYvmYeXJV66IMFuDWC4+/NwA0HC3Z5s8r3DWXkzXs7o3d9eMaWSJKqIe/BzmVONFMGGV2SZAsuSSJJMthNySxgsbtY06iH0bXlniCsNydJksGuApyG1XTYyegU63bbIUky2FXHGnevqmCYcA/WzaVfz9oSSZLBrrragEXuXlVIH+E6uU2EKdZ+WyJJMthNn1XEuzq3sqGbsUuSuISHJMlgN0OchtVkFYFtjE6xetMSSZLBLgPa8NuwmpgBwtTqJsJUa68tkSQZ7LLFaVgdyl5Gz8ptxSVJJEkGu0xzUWKN0QZ0hmvmvkK496okSQa7CLQCS9ytNW9oNjy1HGindIf1EOwMdZIkg11EnIatXb2kliS5HE6wJZIkg13cnIatLbsYvV5uGy5JIkky2OVGK7DUXZprRcKacpsJZ+a6bYkkSfkMdsfgNGwe9TN2SZI+WyJJUv6DndOw+fEsY5ckGbYlkiTVSLDrC8/Dadi4bU+FuR22Q5KkGg12f4IOoM7dGZUh4EnCFOtmoMeWSJJksGN7CHbKvv2MnpV7Ahi0JZIkGewOGAS6YY67MrOeYfRbrNtxSRJJkgx2B7M7JAW/DZsdw4QlSUamWPfYEkmSDHYTsst9mAV9wOOlIPc4LkkiSZLBbrKGCOtiaEbsYXSK9SlckkSSJIPdVJSmYTVNZgHtIU/fRbh2TpIkGewqw2nYqhsEnlgJw+1AY3hsyFAnSZLBrqKG8Kr8KulhdEmSJ4HBBZ4YlSTJYFdNTsNW1E5Gv8W63XZIkmSwm1ZOw07JMOEerJtLv/wOiiRJBruZ0Qf1TsMeSdvYwuiSJP22RJIkg92MexI6nYadkG7GLkli2yRJMthlyzZY0OK+G08xtOfAFKsz1pIkGewyrWk3zFvkvhsxADxBOCu3Bei1JZIkGexisdJ7w7KX0bNyWwmrv0iSJINddFbX6L7aweiSJE87dCVJUuzBrhFYXiP7ZoixS5LsdbhKkqQ8BbuVQH2O90cv4Tq5TYTr5gYcopIkKa/BLo/TsLsYPSu3DZckkSRJNRDscjMNOxvoAF4EX/lhWCxYkiSppoLdCuK9BdoA8Pga2D0/9SSOhj0/dAxKkqQqBbvnd8EVWSz0f2DdLjgaYBYsS/9ZCxx9PLwoS/U2QW8nb
F8MT6+AvfXhJN2ssr92WRfsi2Cc1KXHTFbHyDgfBNIWRVJ3+eUGL+2K41hSq99Wr5h6WPZ22Jj1Otvg1PR2OyzYAKdnve4maE9vr4W1x8LSrNedpK4rr4O6C+FlWa+5EZrLtjv+CjZkve5mWFX20CmRvG8cP2bMdEVwTdcw8NvSfzN+wKOj9KvV9ynVrv2F8HLQYXRBj4cLSZXUQAh2mV7wtzujoS4B5paCXHv4VCIJmmyBvZI0I4oNRHAXhyzd8LQhFeTmMnaOUhIAg7ZgUr2qtw2SKiQpv8buAeBnWapwCOoeh2OHUxmqGZa0hDXtAOiHrfvD+m9V0Qp9nbB3ITw7D/ZPIQlfSOk6wZI7Q/mZd2Xqzae/VHfWdQKXprafBL4dQd1nACektr8HPBZB3esZvc7D+xVPXC+l65GG4LE++GnWC26AFU2p67wG4dH+jL1vjKcFzq6DxamH/hN4JoIx8gZGz+wOAZ+LoOamUt0jngbuiaDuU4F1qe2fA/dHUPdpwNrUa3SM7xTguowVvJqyiy7XwSsXp4LdHnjgPvhaBX/mMPAUYW25TeFHTF0X/Kgs2P1dAXZnfcR0wetSwW5/Ad4SQc1ry4LdA5HUfVtZsLujAHdHUPftlF3Aq0l/iP3lx+Dvs17n22FjOtgNwM9iqPsGuLss2L2/AL+L4LX1F6lgNxDJcayjLNg9FkndHy4LdncV4NYI6r75UMEui6br23b9hLN+mwlry/X5ViNJkmLSEEF9K6r47+9h9KzcU2T/i7eSJEnRBrvlVP7Lpn8uBbnNxHF9hSRJUi6CXSWmYQcJF85vIky19rjbJUmSwW561ZP6gsQk9RDOyG0uhTqXX5AkSQa7GTTZadidjE6xbnfXSpIkg112HHIaNgHmEBYKPh7uvy+CJSEkSZJqMdgdbBq2D9jyAliynDHLtfe7KyVJksEum5YxuiBjN2OXJCkeAye56yRJkuIIdnMIt9bZTLZuFStJkmSwm6Tfu2skSZImp84WSJIkGewkSZJksJMkSZLBTpIkSQY7SZIkg50kSZIMdpIkSTLYSZIkyWAnSZJksJMkSZLBTpIkSQY7SZIkGewkSZJksJMkSTLYSZIkyWAnSZIkg50kSZIMdpIkSQY7SZIkGewkSZJksJMkSZLBTpIkyWAnSZIkg50kSZIMdpIkSTLYSZIkyWAnSZJksJMkSZLBTpIkSdWVdEExtf0b4AcR1H0KcGZq+z7gxxHU/ZfAktT27UBfBHW/A2go/b6vVHfWLQBel9reAnw1grrPBl6Y2v4m8EhkdXcXoMPD6+F1wW6gHWAYHhmA72e95npY3QDnjGwPwUODcG/W626C1yTwvNRDXwB2RDBMrgaaS78fBD4VQc3NpbpHbAPujqDu9cCpqe17gV/HVnd5sJOkqTLYHUGwk6RKqAMetQ2SKuhJWzBhf7QFkipoax1wA7A10ifg2UblVaxj+0Hg3e6+CXsfsNkxKjm2K+BR4Fp3nyRJkiRJkiRJkiRJkiRJkiRJkiRJkiRJkiRJkiRJkiRJkiRJkiRJkiRJkiRN3f8DIrOQLp3kqkAAAAAASUVORK5CYII=" />
///
/// The attributes of each vertex are being interpolated, and the GPU assigns a value for each
/// attribute for each pixel.
///
/// ## Step 9: Fragment shader
///
/// The GPU now executes the fragment shader once for each pixel of each primitive.
///
/// The vertex attributes that were interpolated at the previous step are passed to the fragment
/// shader.
///
/// The fragment shader must return the color to write by setting the value of `gl_FragColor`.
///
/// ## Step 10: Pixel ownership
///
/// This step is mostly an implementation detail. If the window you are drawing on is not on the
/// foreground, or if it is partially obstructed, then the pixels that are not on the
/// foreground will be discarded.
///
/// This is only relevant if you draw to the default framebuffer.
///
/// ## Step 11: Scissor test
///
/// If `scissor` has been specified, then all the pixels that are outside of this rect
/// are discarded.
///
/// ## Step 12: Multisampling
///
/// ## Step 13: Stencil test
///
/// Stencil tests are currently not supported by glium.
///
/// ## Step 14: Depth test
///
/// In addition to the colors, surfaces can also have a depth buffer attached to it. In this
/// situation, just like each pixel has a color, each pixel of the surface also has an associated
/// depth value.
///
/// If a depth buffer is present, the GPU will compare the depth value of the pixel currently
/// being processed, with the existing depth value. Depending on the value of `depth_function`
/// in the draw parameters, the depth test will either pass, in which case the pipeline
/// continues, or fail, in which case the pixel is discarded.
///
/// The purpose of this test is to avoid drawing elements that are in the background of the
/// scene over elements that are in the foreground.
///
/// See the documentation of `DepthFunction` for more information.
///
/// ## Step 15: Blending
///
/// For each pixel to write, the GPU takes the RGBA color that the fragment shader has returned
/// and the existing RGBA color already written on the surface, and merges the two.
///
/// The way they are merged depends on the value of `blending_function`. This allows you to choose
/// how alpha colors are merged together.
///
/// See the documentation of `BlendingFunction` for more information.
///
/// ## Step 16: Dithering (optional)
///
/// ## Step 17: End
///
/// This is finally the step where colors are being written.
///
/// ## Missing steps
///
/// Some steps are missing because they are not supported by glium for the moment: dithering,
/// occlusion query updating, logic operations, sRGB conversion, write masks.
///
/// Instancing and multiple viewports are also missing, as they are not supported.
///
pub trait Surface: Sized {
    /// Clears some attachments of the target.
    ///
    /// Each attachment for which a `Some` value is supplied is filled with that value;
    /// attachments passed as `None` are left untouched.
    fn clear(&mut self, color: Option<(f32, f32, f32, f32)>, depth: Option<f32>,
             stencil: Option<i32>);

    /// Clears the color attachment of the target with the given RGBA value.
    fn clear_color(&mut self, red: f32, green: f32, blue: f32, alpha: f32) {
        let color = (red, green, blue, alpha);
        self.clear(Some(color), None, None);
    }

    /// Clears the depth attachment of the target.
    fn clear_depth(&mut self, value: f32) {
        self.clear(None, Some(value), None);
    }

    /// Clears the stencil attachment of the target.
    fn clear_stencil(&mut self, value: i32) {
        self.clear(None, None, Some(value));
    }

    /// Clears both the color and depth attachments of the target.
    fn clear_color_and_depth(&mut self, color: (f32, f32, f32, f32), depth: f32) {
        self.clear(Some(color), Some(depth), None);
    }

    /// Clears both the color and stencil attachments of the target.
    fn clear_color_and_stencil(&mut self, color: (f32, f32, f32, f32), stencil: i32) {
        self.clear(Some(color), None, Some(stencil));
    }

    /// Clears both the depth and stencil attachments of the target.
    fn clear_depth_and_stencil(&mut self, depth: f32, stencil: i32) {
        self.clear(None, Some(depth), Some(stencil));
    }

    /// Clears the color, depth and stencil attachments of the target at once.
    fn clear_all(&mut self, color: (f32, f32, f32, f32), depth: f32, stencil: i32) {
        self.clear(Some(color), Some(depth), Some(stencil));
    }

    /// Returns the dimensions in pixels of the target, as `(width, height)`.
    fn get_dimensions(&self) -> (u32, u32);

    /// Returns the number of bits of each pixel of the depth buffer.
    ///
    /// Returns `None` if there is no depth buffer.
    fn get_depth_buffer_bits(&self) -> Option<u16>;

    /// Returns true if the surface has a depth buffer available.
    fn has_depth_buffer(&self) -> bool {
        self.get_depth_buffer_bits().is_some()
    }

    /// Returns the number of bits of each pixel of the stencil buffer.
    ///
    /// Returns `None` if there is no stencil buffer.
    fn get_stencil_buffer_bits(&self) -> Option<u16>;

    /// Returns true if the surface has a stencil buffer available.
    fn has_stencil_buffer(&self) -> bool {
        self.get_stencil_buffer_bits().is_some()
    }

    /// Draws.
    ///
    /// See the trait documentation above for what exactly happens when you draw.
    ///
    /// # Panic
    ///
    /// - Panics if the requested depth function requires a depth buffer and none is attached.
    /// - Panics if the type of some of the vertex source's attributes do not match the program's.
    /// - Panics if a program's attribute is not in the vertex source (does *not* panic if a
    ///   vertex's attribute is not used by the program).
    /// - Panics if the viewport is larger than the dimensions supported by the hardware.
    /// - Panics if the depth range is outside of `(0, 1)`.
    /// - Panics if a value in the uniforms doesn't match the type requested by the program.
    ///
    fn draw<'a, 'b, V, I, U>(&mut self, V, &I, program: &Program, uniforms: U,
        draw_parameters: &DrawParameters) -> Result<(), DrawError> where
        V: vertex::MultiVerticesSource<'b>, I: index::ToIndicesSource,
        U: uniforms::Uniforms;

    /// Returns an opaque type that is used by the implementation of blit functions.
    fn get_blit_helper(&self) -> BlitHelper;

    /// Copies a rectangle of pixels from this surface to another surface.
    ///
    /// `source_rect` is the area of the source (`self`) that gets read, while `target_rect`
    /// is the area of `target` that gets written. When the two rectangles don't have the
    /// same dimensions, the copied image is resized to match; `filter` is only relevant in
    /// that situation.
    ///
    /// The source and the target may be the same surface, but if the two rectangles
    /// overlap the behavior is undefined.
    ///
    /// Note that this is a raw pixel copy: no alpha blending, depth/stencil checking, etc.
    /// takes place.
    #[experimental = "The name will likely change"]
    fn blit_color<S>(&self, source_rect: &Rect, target: &S, target_rect: &Rect,
                     filter: uniforms::MagnifySamplerFilter) where S: Surface
    {
        let filter = filter.to_glenum();
        ops::blit(self, target, gl::COLOR_BUFFER_BIT, source_rect, target_rect, filter)
    }

    /// Copies the entire surface to a rectangle of a target surface. See `blit_color`.
    #[experimental = "The name will likely change"]
    fn blit_whole_color_to<S>(&self, target: &S, target_rect: &Rect,
                              filter: uniforms::MagnifySamplerFilter) where S: Surface
    {
        let (src_w, src_h) = self.get_dimensions();
        let src_rect = Rect { left: 0, bottom: 0, width: src_w, height: src_h };
        self.blit_color(&src_rect, target, target_rect, filter)
    }

    /// Copies the entire surface to the entire target. See `blit_color`.
    #[experimental = "The name will likely change"]
    fn fill<S>(&self, target: &S, filter: uniforms::MagnifySamplerFilter) where S: Surface {
        let (src_w, src_h) = self.get_dimensions();
        let (dst_w, dst_h) = target.get_dimensions();
        let src_rect = Rect { left: 0, bottom: 0, width: src_w, height: src_h };
        let target_rect = Rect { left: 0, bottom: 0, width: dst_w, height: dst_h };
        self.blit_color(&src_rect, target, &target_rect, filter)
    }
}
/// Error that can happen while drawing.
///
/// Returned by `Surface::draw` when the draw command cannot be executed.
#[derive(Clone, Debug)]
pub enum DrawError {
    /// A depth function has been requested but no depth buffer is available.
    NoDepthBuffer,
    /// The type of a vertex attribute in the vertices source doesn't match what the
    /// program requires.
    AttributeTypeMismatch,
    /// One of the attributes required by the program is missing from the vertex format.
    ///
    /// Note that it is perfectly valid to have an attribute in the vertex format that is
    /// not used by the program.
    AttributeMissing,
    /// The viewport's dimensions are not supported by the backend.
    ViewportTooLarge,
    /// The depth range is outside of the `(0, 1)` range.
    InvalidDepthRange,
    /// The type of a uniform doesn't match what the program requires.
    UniformTypeMismatch {
        /// Name of the uniform you are trying to bind.
        name: String,
        /// The type that the program expects for this uniform.
        expected: uniforms::UniformType,
    },
    /// Tried to bind a uniform buffer to a single uniform value.
    UniformBufferToValue {
        /// Name of the uniform you are trying to bind.
        name: String,
    },
    /// Tried to bind a single uniform value to a uniform block.
    UniformValueToBlock {
        /// Name of the uniform you are trying to bind.
        name: String,
    },
    /// The layout of the content of the uniform buffer does not match the layout of the block.
    UniformBlockLayoutMismatch {
        /// Name of the block you are trying to bind.
        name: String,
    },
    /// The number of vertices per patch that has been requested is not supported.
    UnsupportedVerticesPerPatch,
    /// Trying to use tessellation, but this is not supported by the underlying hardware.
    TessellationNotSupported,
    /// Using a program which contains tessellation shaders, but without submitting patches.
    TessellationWithoutPatches,
    /// Trying to use a sampler, but they are not supported by the backend.
    SamplersNotSupported,
    /// When you use instancing, all vertices sources must have the same size.
    InstancesCountMismatch,
    /// If you don't use indices, then all vertices sources must have the same size.
    VerticesSourcesLengthMismatch,
}
impl std::fmt::Display for DrawError {
    /// Writes a human-readable description of the error.
    ///
    /// Variants that carry the name of the offending uniform or block include that name
    /// (and, for `UniformTypeMismatch`, the expected type) in the message, so the user can
    /// tell which binding failed. Previously the bound `name`/`expected` fields were
    /// silently ignored.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
        match self {
            &DrawError::NoDepthBuffer => write!(fmt, "A depth function has been requested but no \
                                                      depth buffer is available."),
            &DrawError::AttributeTypeMismatch => write!(fmt, "The type of a vertex attribute in \
                                                              the vertices source doesn't match \
                                                              what the program requires."),
            &DrawError::AttributeMissing => write!(fmt, "One of the attributes required by the \
                                                         program is missing from the vertex \
                                                         format."),
            &DrawError::ViewportTooLarge => write!(fmt, "The viewport's dimensions are not \
                                                         supported by the backend."),
            &DrawError::InvalidDepthRange => write!(fmt, "The depth range is outside of the \
                                                          `(0, 1)` range."),
            &DrawError::UniformTypeMismatch { ref name, ref expected } => {
                // `expected` implements `Debug` because `DrawError` derives `Debug`.
                write!(fmt, "The type of the uniform `{}` doesn't match what the program \
                             requires: expected {:?}.", name, expected)
            },
            &DrawError::UniformBufferToValue { ref name } => {
                write!(fmt, "Tried to bind a uniform buffer to the single uniform value `{}`.",
                       name)
            },
            &DrawError::UniformValueToBlock { ref name } => {
                write!(fmt, "Tried to bind a single uniform value to the uniform block `{}`.",
                       name)
            },
            &DrawError::UniformBlockLayoutMismatch { ref name } => {
                write!(fmt, "The layout of the content of the uniform buffer does not match \
                             the layout of the block `{}`.", name)
            },
            &DrawError::UnsupportedVerticesPerPatch => write!(fmt, "The number of vertices per \
                                                                    patch that has been requested \
                                                                    is not supported."),
            &DrawError::TessellationNotSupported => write!(fmt, "Trying to use tessellation, but \
                                                                 this is not supported by the \
                                                                 underlying hardware."),
            &DrawError::TessellationWithoutPatches => write!(fmt, "Using a program which contains \
                                                                   tessellation shaders, but \
                                                                   without submitting patches."),
            &DrawError::SamplersNotSupported => write!(fmt, "Trying to use a sampler, but they are \
                                                             not supported by the backend."),
            &DrawError::InstancesCountMismatch => write!(fmt, "When you use instancing, all \
                                                               vertices sources must have the \
                                                               same size"),
            &DrawError::VerticesSourcesLengthMismatch => write!(fmt, "If you don't use indices, \
                                                                      then all vertices sources \
                                                                      must have the same size."),
        }
    }
}
// Opaque handle used internally by the blitting functions: the display plus the
// framebuffer attachments of the surface (`Frame` passes `None` here for the
// default framebuffer).
#[doc(hidden)]
pub struct BlitHelper<'a>(&'a Arc<DisplayImpl>, Option<&'a fbo::FramebufferAttachments>);
/// Implementation of `Surface`, targeting the default framebuffer.
///
/// The back- and front-buffers are swapped when the `Frame` is destroyed. This operation is
/// instantaneous, even when vsync is enabled.
pub struct Frame {
    // Display whose default framebuffer this frame draws to; its buffers are
    // swapped when the frame is dropped (see the `Drop` impl below).
    display: Display,
    // Dimensions in pixels of the framebuffer, as (width, height); returned
    // as-is by `get_dimensions`.
    dimensions: (u32, u32),
}
impl Frame {
    /// Stop drawing, and swap the buffers.
    ///
    /// The body is intentionally empty: taking `self` by value drops the `Frame`,
    /// and the `Drop` implementation performs the actual buffer swap.
    pub fn finish(self) {
    }
}
impl Surface for Frame {
    fn clear(&mut self, color: Option<(f32, f32, f32, f32)>, depth: Option<f32>,
             stencil: Option<i32>)
    {
        // `None` selects the default framebuffer.
        ops::clear(&self.display.context, None, color, depth, stencil);
    }

    fn get_dimensions(&self) -> (u32, u32) {
        self.dimensions
    }

    fn get_depth_buffer_bits(&self) -> Option<u16> {
        self.display.context.context.capabilities().depth_bits
    }

    fn get_stencil_buffer_bits(&self) -> Option<u16> {
        self.display.context.context.capabilities().stencil_bits
    }

    fn draw<'a, 'b, V, I, U>(&mut self, vertex_buffer: V,
                             index_buffer: &I, program: &Program, uniforms: U,
                             draw_parameters: &DrawParameters) -> Result<(), DrawError>
        where I: index::ToIndicesSource, U: uniforms::Uniforms,
              V: vertex::MultiVerticesSource<'b>
    {
        use index::ToIndicesSource;

        // A depth test cannot run without a depth buffer attached.
        if draw_parameters.depth_function.requires_depth_buffer() && !self.has_depth_buffer() {
            return Err(DrawError::NoDepthBuffer);
        }

        // Reject viewports bigger than what the hardware supports.
        if let Some(viewport) = draw_parameters.viewport {
            let max_dims = self.display.context.context.capabilities().max_viewport_dims;
            if viewport.width > max_dims.0 as u32 || viewport.height > max_dims.1 as u32 {
                return Err(DrawError::ViewportTooLarge);
            }
        }

        ops::draw(&self.display, None, vertex_buffer.build_vertices_source().as_mut_slice(),
                  index_buffer.to_indices_source(), program, uniforms, draw_parameters,
                  self.dimensions)
    }

    fn get_blit_helper(&self) -> BlitHelper {
        // The default framebuffer has no attachment description.
        BlitHelper(&self.display.context, None)
    }
}
#[unsafe_destructor]
impl Drop for Frame {
    fn drop(&mut self) {
        // Swapping the buffers here is what actually presents the frame;
        // `Frame::finish` relies on this destructor.
        self.display.context.context.swap_buffers();
    }
}
/// Objects that can build a `Display` object.
pub trait DisplayBuild {
    /// Build a context and a `Display` to draw on it.
    ///
    /// Performs a compatibility check to make sure that all core elements of glium
    /// are supported by the implementation.
    ///
    /// Returns a `GliumCreationError` if the context cannot be created or if the
    /// OpenGL implementation is too old.
    fn build_glium(self) -> Result<Display, GliumCreationError>;
}
/// Error that can happen while creating a glium display.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GliumCreationError {
    /// An error has happened while creating the glutin window or headless renderer.
    /// Wraps the original `glutin` error.
    GlutinCreationError(glutin::CreationError),
    /// The OpenGL implementation is too old.
    /// The `String` carries the human-readable reason.
    IncompatibleOpenGl(String),
}
impl std::fmt::Display for GliumCreationError {
fn fmt(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
let self_error = self as &std::error::Error;
formatter.write_str(self_error.description())
}
}
impl std::error::Error for GliumCreationError {
fn description(&self) -> &str {
match self {
&GliumCreationError::GlutinCreationError(_) => "Error while creating glutin window or headless renderer",
&GliumCreationError::IncompatibleOpenGl(_) => "The OpenGL implementation is too old to work with glium",
}
}
fn cause(&self) -> Option<&std::error::Error> {
match self {
&GliumCreationError::GlutinCreationError(ref err) => Some(err as &std::error::Error),
&GliumCreationError::IncompatibleOpenGl(_) => None,
}
}
}
impl std::error::FromError<glutin::CreationError> for GliumCreationError {
    // Lets `try!` on glutin results auto-wrap the error into GliumCreationError.
    fn from_error(err: glutin::CreationError) -> GliumCreationError {
        GliumCreationError::GlutinCreationError(err)
    }
}
impl<'a> DisplayBuild for glutin::WindowBuilder<'a> {
    fn build_glium(self) -> Result<Display, GliumCreationError> {
        // Creation errors propagate via `try!` (converted through `FromError`).
        let context = try!(context::new_from_window(self, None));
        let display = Display {
            context: Arc::new(DisplayImpl {
                context: context,
                debug_callback: Mutex::new(None),
                // `Some` here; the container is `take`n in DisplayImpl::drop.
                framebuffer_objects: Some(fbo::FramebuffersContainer::new()),
                vertex_array_objects: Mutex::new(HashMap::new()),
                samplers: Mutex::new(HashMap::new()),
            }),
        };
        // Install the GL debug-message hook (skipped when cfg!(ndebug)).
        display.init_debug_callback();
        Ok(display)
    }
}
#[cfg(feature = "headless")]
impl DisplayBuild for glutin::HeadlessRendererBuilder {
    fn build_glium(self) -> Result<Display, GliumCreationError> {
        // Same construction as the windowed variant, but on a headless context.
        let context = try!(context::new_from_headless(self));
        let display = Display {
            context: Arc::new(DisplayImpl {
                context: context,
                debug_callback: Mutex::new(None),
                // `Some` here; the container is `take`n in DisplayImpl::drop.
                framebuffer_objects: Some(fbo::FramebuffersContainer::new()),
                vertex_array_objects: Mutex::new(HashMap::new()),
                samplers: Mutex::new(HashMap::new()),
            }),
        };
        // Install the GL debug-message hook (skipped when cfg!(ndebug)).
        display.init_debug_callback();
        Ok(display)
    }
}
/// The main object of this library. Controls the whole display.
///
/// This object contains a smart pointer to the real implementation.
/// Cloning the display allows you to easily share the `Display` object throughout
/// your program and between threads.
#[derive(Clone)]
pub struct Display {
    // Shared implementation; `clone` only bumps the `Arc` refcount.
    context: Arc<DisplayImpl>,
}
// Shared, reference-counted state behind `Display`. Destroyed (and its GL
// objects cleaned up) when the last `Display` clone is dropped.
struct DisplayImpl {
    // contains everything related to the current context and its state
    context: context::Context,
    // the callback used for debug messages
    debug_callback: Mutex<Option<Box<FnMut(String, debug::Source, debug::MessageType, debug::Severity)
                                    + Send + Sync>>>,
    // we maintain a list of FBOs
    // the option is here to destroy the container
    framebuffer_objects: Option<fbo::FramebuffersContainer>,
    // we maintain a list of VAOs for each vertexbuffer-indexbuffer-program association
    // the key is a (buffers-list, program) ; the buffers list must be sorted
    vertex_array_objects: Mutex<HashMap<(Vec<gl::types::GLuint>, Handle),
                                        vertex_array_object::VertexArrayObject>>,
    // we maintain a list of samplers for each possible behavior
    samplers: Mutex<HashMap<uniforms::SamplerBehavior, sampler_object::SamplerObject>>,
}
impl Display {
    /// Reads all events received by the window.
    ///
    /// This iterator polls for events and can be exhausted.
    pub fn poll_events(&self) -> PollEventsIter {
        self.context.context.poll_events()
    }
    /// Reads all events received by the window.
    ///
    /// Unlike `poll_events`, this delegates to glutin's blocking event iterator.
    pub fn wait_events(&self) -> WaitEventsIter {
        self.context.context.wait_events()
    }
    /// Returns the dimensions of the main framebuffer.
    pub fn get_framebuffer_dimensions(&self) -> (u32, u32) {
        self.context.context.get_framebuffer_dimensions()
    }
    /// Start drawing on the backbuffer.
    ///
    /// This function returns a `Frame`, which can be used to draw on it. When the `Frame` is
    /// destroyed, the buffers are swapped.
    ///
    /// Note that destroying a `Frame` is immediate, even if vsync is enabled.
    pub fn draw(&self) -> Frame {
        Frame {
            // Cheap clone: `Display` is an `Arc` handle.
            display: self.clone(),
            // Dimensions are captured once, at frame creation time.
            dimensions: self.get_framebuffer_dimensions(),
        }
    }
    /// Returns the maximum value that can be used for anisotropic filtering, or `None`
    /// if the hardware doesn't support it.
    pub fn get_max_anisotropy_support(&self) -> Option<u16> {
        self.context.context.capabilities().max_texture_max_anisotropy.map(|v| v as u16)
    }
    /// Returns the maximum dimensions of the viewport.
    ///
    /// Glium will panic if you request a larger viewport than this when drawing.
    pub fn get_max_viewport_dimensions(&self) -> (u32, u32) {
        let d = self.context.context.capabilities().max_viewport_dims;
        (d.0 as u32, d.1 as u32)
    }
    /// Releases the shader compiler, indicating that no new programs will be created for a while.
    ///
    /// # Features
    ///
    /// This method is always available, but is a no-op if it's not available in
    /// the implementation.
    pub fn release_shader_compiler(&self) {
        self.context.context.exec(move |: ctxt| {
            unsafe {
                // glReleaseShaderCompiler is only called on GLES or GL >= 4.1.
                if ctxt.opengl_es || ctxt.version >= &context::GlVersion(4, 1) {
                    ctxt.gl.ReleaseShaderCompiler();
                }
            }
        });
    }
    /// Returns an estimate of the amount of video memory available in bytes.
    ///
    /// Returns `None` if no estimate is available.
    pub fn get_free_video_memory(&self) -> Option<usize> {
        // The query runs on the GL thread; the result comes back over a channel.
        let (tx, rx) = channel();
        self.context.context.exec(move |: ctxt| {
            unsafe {
                use std::mem;
                let mut value: [gl::types::GLint; 4] = mem::uninitialized();
                // Vendor-specific extensions; both report the value in kilobytes.
                let value = if ctxt.extensions.gl_nvx_gpu_memory_info {
                    ctxt.gl.GetIntegerv(gl::GPU_MEMORY_INFO_CURRENT_AVAILABLE_VIDMEM_NVX,
                                        &mut value[0]);
                    Some(value[0])
                } else if ctxt.extensions.gl_ati_meminfo {
                    ctxt.gl.GetIntegerv(gl::TEXTURE_FREE_MEMORY_ATI, &mut value[0]);
                    Some(value[0])
                } else {
                    None
                };
                tx.send(value).ok();
            }
        });
        // Kilobytes -> bytes.
        rx.recv().unwrap().map(|v| v as usize * 1024)
    }
    // TODO: do this more properly
    fn init_debug_callback(&self) {
        // Debug output is only wired up in debug builds.
        if cfg!(ndebug) {
            return;
        }
        // this is the C callback
        extern "system" fn callback_wrapper(source: gl::types::GLenum, ty: gl::types::GLenum,
            id: gl::types::GLuint, severity: gl::types::GLenum, _length: gl::types::GLsizei,
            message: *const gl::types::GLchar, user_param: *mut libc::c_void)
        {
            let message = unsafe {
                String::from_utf8(std::ffi::c_str_to_bytes(&message).to_vec()).unwrap()
            };
            // Escalate serious driver messages into a panic so they can't be missed.
            if severity == gl::DEBUG_SEVERITY_HIGH ||
               severity == gl::DEBUG_SEVERITY_MEDIUM
            {
                panic!("Debug message with high or medium severity: `{}`.\n\
                        Please report this error: https://github.com/tomaka/glium/issues", message);
            }
        }
        // enabling the callback
        self.context.context.exec(move |: ctxt| {
            unsafe {
                if ctxt.version >= &context::GlVersion(4,5) || ctxt.extensions.gl_khr_debug ||
                   ctxt.extensions.gl_arb_debug_output
                {
                    // Synchronous output keeps each message attributable to the
                    // GL call that produced it.
                    if ctxt.state.enabled_debug_output_synchronous != true {
                        ctxt.gl.Enable(gl::DEBUG_OUTPUT_SYNCHRONOUS);
                        ctxt.state.enabled_debug_output_synchronous = true;
                    }
                    if ctxt.version >= &context::GlVersion(4,5) || ctxt.extensions.gl_khr_debug {
                        // TODO: with GLES, the GL_KHR_debug function has a `KHR` suffix
                        // but with GL only, it doesn't have one
                        ctxt.gl.DebugMessageCallback(callback_wrapper, std::ptr::null());
                        ctxt.gl.DebugMessageControl(gl::DONT_CARE, gl::DONT_CARE, gl::DONT_CARE, 0,
                                                    std::ptr::null(), gl::TRUE);
                        if ctxt.state.enabled_debug_output != Some(true) {
                            ctxt.gl.Enable(gl::DEBUG_OUTPUT);
                            ctxt.state.enabled_debug_output = Some(true);
                        }
                    } else {
                        // ARB fallback for contexts without core/KHR debug support.
                        ctxt.gl.DebugMessageCallbackARB(callback_wrapper, std::ptr::null());
                        ctxt.gl.DebugMessageControlARB(gl::DONT_CARE, gl::DONT_CARE, gl::DONT_CARE,
                                                       0, std::ptr::null(), gl::TRUE);
                        ctxt.state.enabled_debug_output = Some(true);
                    }
                }
            }
        });
    }
    /// Reads the content of the front buffer.
    ///
    /// You will only see the data that has finished being drawn.
    ///
    /// This function can return any type that implements `Texture2dData`.
    ///
    /// ## Example
    ///
    /// ```no_run
    /// # extern crate glium;
    /// # extern crate glutin;
    /// # fn main() {
    /// # let display: glium::Display = unsafe { ::std::mem::uninitialized() };
    /// let pixels: Vec<Vec<(u8, u8, u8)>> = display.read_front_buffer();
    /// # }
    /// ```
    pub fn read_front_buffer<P, T>(&self) -> T          // TODO: remove Clone for P
        where P: texture::PixelValue + Clone + Send,
              T: texture::Texture2dData<Data = P>
    {
        ops::read_from_default_fb(gl::FRONT_LEFT, self)
    }
    /// Asserts that there are no OpenGL errors pending.
    ///
    /// This function should be used in tests.
    pub fn assert_no_error(&self) {
        let (tx, rx) = channel();
        self.context.context.exec(move |: mut ctxt| {
            tx.send(get_gl_error(&mut ctxt)).ok();
        });
        // Panic with the error name when any GL error was pending.
        match rx.recv().unwrap() {
            Some(msg) => panic!("{}", msg),
            None => ()
        };
    }
    /// Waits until all the previous commands have finished being executed.
    ///
    /// When you execute OpenGL functions, they are not executed immediately. Instead they are
    /// put in a queue. This function waits until all commands have finished being executed, and
    /// the queue is empty.
    ///
    /// **You don't need to call this function manually, except when running benchmarks.**
    pub fn synchronize(&self) {
        let (tx, rx) = channel();
        self.context.context.exec(move |: ctxt| {
            unsafe { ctxt.gl.Finish(); }
            tx.send(()).ok();
        });
        // Block until the glFinish above has completed.
        rx.recv().unwrap();
    }
}
// this destructor is here because objects in `Display` contain an `Arc<DisplayImpl>`,
// which would lead to a leak
impl Drop for DisplayImpl {
    fn drop(&mut self) {
        // disabling callback
        self.context.exec(move |: ctxt| {
            unsafe {
                if ctxt.state.enabled_debug_output != Some(false) {
                    if ctxt.version >= &context::GlVersion(4,5) || ctxt.extensions.gl_khr_debug {
                        ctxt.gl.Disable(gl::DEBUG_OUTPUT);
                    } else if ctxt.extensions.gl_arb_debug_output {
                        // A null function pointer unregisters the ARB callback.
                        ctxt.gl.DebugMessageCallbackARB(std::mem::transmute(0us),
                                                        std::ptr::null());
                    }
                    ctxt.state.enabled_debug_output = Some(false);
                    // Make sure the unregistration has taken effect before teardown.
                    ctxt.gl.Finish();
                }
            }
        });
        // Destroy cached GL objects while the context is still alive.
        {
            let fbos = self.framebuffer_objects.take();
            fbos.unwrap().cleanup(&self.context);
        }
        {
            let mut vaos = self.vertex_array_objects.lock().unwrap();
            vaos.clear();
        }
        {
            let mut samplers = self.samplers.lock().unwrap();
            samplers.clear();
        }
    }
}
#[allow(dead_code)]
/// Polls `glGetError` once and maps the result to a human-readable name.
/// `None` means no error was pending.
fn get_gl_error(ctxt: &mut context::CommandContext) -> Option<&'static str> {
    match unsafe { ctxt.gl.GetError() } {
        gl::NO_ERROR => None,
        gl::INVALID_ENUM => Some("GL_INVALID_ENUM"),
        gl::INVALID_VALUE => Some("GL_INVALID_VALUE"),
        gl::INVALID_OPERATION => Some("GL_INVALID_OPERATION"),
        gl::INVALID_FRAMEBUFFER_OPERATION => Some("GL_INVALID_FRAMEBUFFER_OPERATION"),
        gl::OUT_OF_MEMORY => Some("GL_OUT_OF_MEMORY"),
        gl::STACK_UNDERFLOW => Some("GL_STACK_UNDERFLOW"),
        gl::STACK_OVERFLOW => Some("GL_STACK_OVERFLOW"),
        _ => Some("Unknown glGetError return value")
    }
}
|
#![feature(custom_derive, plugin)]
#![plugin(serde_macros)]
extern crate serde;
extern crate serde_json;
extern crate hyper;
pub mod request;
pub mod response;
pub use request::{get, post, put, head, delete};
/// Result type returned by the request helpers: a `Response` on success, or a
/// `hyper` transport error on failure.
pub type RequestsResult = hyper::Result<response::Response>;
#[cfg(test)]
mod test {
    //! NOTE(review): every test below performs a live HTTP request against
    //! httpbin.org, so the suite requires network access and may be flaky.
    use super::*;
    use hyper;
    // Placeholder for httpbin's "args" object; currently unused (see the
    // commented-out field in `HttpBinData`).
    #[derive(Debug, Deserialize)]
    struct Args;
    // Subset of the request headers echoed back by httpbin.
    #[derive(Debug, Deserialize)]
    struct Headers {
        #[serde(rename="Host")]
        host: String,
        #[serde(rename="User-Agent")]
        user_agent: String,
    }
    // Shape of the JSON body returned by httpbin's /get endpoint.
    #[derive(Debug, Deserialize)]
    struct HttpBinData {
        // args: Args,
        headers: Headers,
        origin: String,
        url: String,
    }
    #[test]
    fn simple_get() {
        const URL: &'static str = "http://httpbin.org/get";
        let res = get(URL).unwrap();
        assert_eq!(res.url(), URL);
        assert_eq!(res.status_code(), hyper::Ok);
        assert_eq!(res.reason(), "OK");
        // Deserialize the echoed request metadata and verify it round-trips.
        let data: HttpBinData = res.from_json().unwrap();
        assert_eq!(data.url, URL);
        assert_eq!(data.headers.host, "httpbin.org");
        assert_eq!(data.headers.user_agent, concat!("requests-rs/", env!("CARGO_PKG_VERSION")));
    }
    #[test]
    fn simple_post() {
        const URL: &'static str = "http://httpbin.org/post";
        let res = post(URL).unwrap();
        assert_eq!(res.url(), URL);
        assert_eq!(res.status_code(), hyper::Ok);
        assert_eq!(res.reason(), "OK");
    }
    #[test]
    fn simple_put() {
        const URL: &'static str = "http://httpbin.org/put";
        let res = put(URL).unwrap();
        assert_eq!(res.url(), URL);
        assert_eq!(res.status_code(), hyper::Ok);
        assert_eq!(res.reason(), "OK");
    }
    #[test]
    fn simple_head() {
        const URL: &'static str = "http://httpbin.org/get";
        let res = head(URL).unwrap();
        assert_eq!(res.url(), URL);
        assert_eq!(res.status_code(), hyper::Ok);
        assert_eq!(res.reason(), "OK");
    }
    #[test]
    fn simple_delete() {
        const URL: &'static str = "http://httpbin.org/delete";
        let res = delete(URL).unwrap();
        assert_eq!(res.url(), URL);
        assert_eq!(res.status_code(), hyper::Ok);
        assert_eq!(res.reason(), "OK");
    }
    #[test]
    fn user_agent() {
        // Expected raw body, matching httpbin's exact JSON pretty-printing.
        let useragent = concat!("{\n \"user-agent\": \"requests-rs/",
                                env!("CARGO_PKG_VERSION"),
                                "\"\n}\n");
        const URL: &'static str = "http://httpbin.org/user-agent";
        let res = get(URL).unwrap();
        assert_eq!(res.url(), URL);
        assert_eq!(res.status_code(), hyper::Ok);
        assert_eq!(res.reason(), "OK");
        assert_eq!(res.text(), Some(useragent));
    }
    #[test]
    fn user_agent_json() {
        #[derive(Debug, Deserialize)]
        struct UserAgent {
            #[serde(rename="user-agent")]
            user_agent: String,
        }
        const URL: &'static str = "http://httpbin.org/user-agent";
        let res = get(URL).unwrap();
        assert!(res.is_json());
        let ua: UserAgent = res.from_json().unwrap();
        assert_eq!(ua.user_agent, concat!("requests-rs/", env!("CARGO_PKG_VERSION")));
    }
}
Migrate test from other crate
#![feature(custom_derive, plugin)]
#![plugin(serde_macros)]
extern crate serde;
extern crate serde_json;
extern crate hyper;
pub mod request;
pub mod response;
pub use request::{get, post, put, head, delete};
/// Result type returned by the request helpers: a `Response` on success, or a
/// `hyper` transport error on failure.
pub type RequestsResult = hyper::Result<response::Response>;
#[cfg(test)]
mod test {
    //! NOTE(review): every test below performs a live HTTP request against
    //! httpbin.org, so the suite requires network access and may be flaky.
    use super::*;
    use hyper;
    // Placeholder for httpbin's "args" object; currently unused (see the
    // commented-out field in `HttpBinData`).
    #[derive(Debug, Deserialize)]
    struct Args;
    // Subset of the request headers echoed back by httpbin.
    #[derive(Debug, Deserialize)]
    struct Headers {
        #[serde(rename="Host")]
        host: String,
        #[serde(rename="User-Agent")]
        user_agent: String,
    }
    // Shape of the JSON body returned by httpbin's /get endpoint.
    #[derive(Debug, Deserialize)]
    struct HttpBinData {
        // args: Args,
        headers: Headers,
        origin: String,
        url: String,
    }
    #[test]
    fn simple_get() {
        const URL: &'static str = "http://httpbin.org/get";
        let res = get(URL).unwrap();
        assert_eq!(res.url(), URL);
        assert_eq!(res.status_code(), hyper::Ok);
        assert_eq!(res.reason(), "OK");
        // Deserialize the echoed request metadata and verify it round-trips.
        let data: HttpBinData = res.from_json().unwrap();
        assert_eq!(data.url, URL);
        assert_eq!(data.headers.host, "httpbin.org");
        assert_eq!(data.headers.user_agent, concat!("requests-rs/", env!("CARGO_PKG_VERSION")));
    }
    #[test]
    fn simple_post() {
        const URL: &'static str = "http://httpbin.org/post";
        let res = post(URL).unwrap();
        assert_eq!(res.url(), URL);
        assert_eq!(res.status_code(), hyper::Ok);
        assert_eq!(res.reason(), "OK");
    }
    #[test]
    fn simple_put() {
        const URL: &'static str = "http://httpbin.org/put";
        let res = put(URL).unwrap();
        assert_eq!(res.url(), URL);
        assert_eq!(res.status_code(), hyper::Ok);
        assert_eq!(res.reason(), "OK");
    }
    #[test]
    fn simple_head() {
        const URL: &'static str = "http://httpbin.org/get";
        let res = head(URL).unwrap();
        assert_eq!(res.url(), URL);
        assert_eq!(res.status_code(), hyper::Ok);
        assert_eq!(res.reason(), "OK");
    }
    #[test]
    fn simple_delete() {
        const URL: &'static str = "http://httpbin.org/delete";
        let res = delete(URL).unwrap();
        assert_eq!(res.url(), URL);
        assert_eq!(res.status_code(), hyper::Ok);
        assert_eq!(res.reason(), "OK");
    }
    #[test]
    fn user_agent() {
        // Expected raw body, matching httpbin's exact JSON pretty-printing.
        let useragent = concat!("{\n \"user-agent\": \"requests-rs/",
                                env!("CARGO_PKG_VERSION"),
                                "\"\n}\n");
        const URL: &'static str = "http://httpbin.org/user-agent";
        let res = get(URL).unwrap();
        assert_eq!(res.url(), URL);
        assert_eq!(res.status_code(), hyper::Ok);
        assert_eq!(res.reason(), "OK");
        assert_eq!(res.text(), Some(useragent));
    }
    #[test]
    fn user_agent_json() {
        #[derive(Debug, Deserialize)]
        struct UserAgent {
            #[serde(rename="user-agent")]
            user_agent: String,
        }
        const URL: &'static str = "http://httpbin.org/user-agent";
        let res = get(URL).unwrap();
        assert!(res.is_json());
        let ua: UserAgent = res.from_json().unwrap();
        assert_eq!(ua.user_agent, concat!("requests-rs/", env!("CARGO_PKG_VERSION")));
    }
    use std::collections::HashMap;
    // Generic response shape shared by several httpbin endpoints.
    #[derive(Deserialize, Debug)]
    struct GenericResponse {
        args: HashMap<String, String>,
        data: Option<String>,
        files: Option<HashMap<String, String>>,
        form: Option<HashMap<String, String>>,
        headers: HashMap<String, String>,
        json: Option<String>,
        origin: String,
        url: String,
    }
    #[test]
    fn generic_get() {
        const URL: &'static str = "http://httpbin.org/get";
        let res = get(URL).unwrap();
        assert!(res.is_json());
        println!("{}", res.text().unwrap());
        let out = res.from_json::<GenericResponse>().unwrap();
        println!("{:#?}", out);
        // A plain GET carries no JSON body, so httpbin reports `json: null`.
        assert_eq!(out.json, None);
        assert_eq!(out.url, "http://httpbin.org/get");
    }
}
|
//! # num_cpus
//!
//! A crate with utilities to determine the number of CPUs available on the
//! current system.
//!
//! ## Example
//!
//! ```
//! let cpus = num_cpus::get();
//! ```
#![cfg_attr(test, deny(warnings))]
#![deny(missing_docs)]
#![allow(non_snake_case)]
#[cfg(not(windows))]
extern crate libc;
/// Returns the number of available CPUs of the current system.
///
/// # Note
///
/// This will check sched affinity on Linux.
#[inline]
pub fn get() -> usize {
    // Dispatches to the platform-specific implementation selected by cfg.
    get_num_cpus()
}
/// Returns the number of physical cores of the current system.
///
/// If not possible on the particular architecture, returns same as `get()`
/// which is the logical CPUs.
#[inline]
pub fn get_physical() -> usize {
    // Dispatches to the platform-specific implementation selected by cfg.
    get_num_physical_cpus()
}
#[cfg(not(any(target_os = "linux", target_os = "windows", target_os="macos")))]
#[inline]
fn get_num_physical_cpus() -> usize {
    // Not implemented, fallback
    // Physical-core detection is only implemented for Linux/Windows/macOS;
    // elsewhere the logical count is the best available answer.
    get_num_cpus()
}
#[cfg(target_os = "windows")]
/// Physical-core count on Windows, falling back to the logical count when the
/// Win32 query cannot produce an answer.
fn get_num_physical_cpus() -> usize {
    get_num_physical_cpus_windows().unwrap_or_else(get_num_cpus)
}
#[cfg(target_os = "windows")]
fn get_num_physical_cpus_windows() -> Option<usize> {
    // Inspired by https://msdn.microsoft.com/en-us/library/ms683194
    use std::ptr;
    use std::mem;
    #[allow(non_upper_case_globals)]
    const RelationProcessorCore: u32 = 0;
    // Minimal mirror of the Win32 SYSTEM_LOGICAL_PROCESSOR_INFORMATION layout.
    #[repr(C)]
    #[allow(non_camel_case_types)]
    struct SYSTEM_LOGICAL_PROCESSOR_INFORMATION {
        mask: usize,
        relationship: u32,
        _unused: [u64; 2]
    }
    extern "system" {
        fn GetLogicalProcessorInformation(
            info: *mut SYSTEM_LOGICAL_PROCESSOR_INFORMATION,
            length: &mut u32
        ) -> u32;
    }
    // First we need to determine how much space to reserve.
    // The required size of the buffer, in bytes.
    let mut needed_size = 0;
    unsafe {
        GetLogicalProcessorInformation(ptr::null_mut(), &mut needed_size);
    }
    let struct_size = mem::size_of::<SYSTEM_LOGICAL_PROCESSOR_INFORMATION>() as u32;
    // Could be 0, or some other bogus size.
    if needed_size == 0 || needed_size < struct_size || needed_size % struct_size != 0 {
        return None;
    }
    let count = needed_size / struct_size;
    // Allocate some memory where we will store the processor info.
    let mut buf = Vec::with_capacity(count as usize);
    let result;
    unsafe {
        result = GetLogicalProcessorInformation(buf.as_mut_ptr(), &mut needed_size);
    }
    // Failed for any reason.
    if result == 0 {
        return None;
    }
    // Recompute: the second call may have updated `needed_size`.
    let count = needed_size / struct_size;
    unsafe {
        // The successful call above initialized `count` entries in `buf`.
        buf.set_len(count as usize);
    }
    let phys_proc_count = buf.iter()
        // Only interested in processor packages (physical processors.)
        // NOTE(review): RelationProcessorCore (0) actually selects physical
        // *cores*, not packages — the filter is right, the wording above isn't.
        .filter(|proc_info| proc_info.relationship == RelationProcessorCore)
        .count();
    if phys_proc_count == 0 {
        None
    } else {
        Some(phys_proc_count)
    }
}
#[cfg(target_os = "linux")]
fn get_num_physical_cpus() -> usize {
    use std::io::BufReader;
    use std::io::BufRead;
    use std::fs::File;
    use std::collections::HashSet;
    // Count distinct (physical id, core id) pairs in /proc/cpuinfo; each pair
    // names one physical core. Any failure falls back to the logical count.
    let file = match File::open("/proc/cpuinfo") {
        Ok(val) => val,
        Err(_) => {return get_num_cpus()},
    };
    let reader = BufReader::new(file);
    let mut set = HashSet::new();
    let mut coreid: u32 = 0;
    let mut physid: u32 = 0;
    // Number of id fields seen since the last recorded pair; a pair is stored
    // once two id lines have arrived.
    // NOTE(review): this assumes "core id" and "physical id" alternate per
    // processor entry — two consecutive "core id" lines would also trigger it.
    let mut chgcount = 0;
    for line in reader.lines().filter_map(|result| result.ok()) {
        let parts: Vec<&str> = line.split(':').map(|s| s.trim()).collect();
        if parts.len() != 2 {
            continue
        }
        if parts[0] == "core id" || parts[0] == "physical id" {
            let value = match parts[1].trim().parse() {
                Ok(val) => val,
                Err(_) => break,
            };
            match parts[0] {
                "core id" => coreid = value,
                "physical id" => physid = value,
                _ => {},
            }
            chgcount += 1;
        }
        if chgcount == 2 {
            set.insert((physid, coreid));
            chgcount = 0;
        }
    }
    let count = set.len();
    // No id fields at all (some kernels omit them): use the logical count.
    if count == 0 { get_num_cpus() } else { count }
}
#[cfg(windows)]
/// Logical-CPU count on Windows, via `GetSystemInfo`.
fn get_num_cpus() -> usize {
    // Minimal mirror of the Win32 SYSTEM_INFO layout.
    #[repr(C)]
    struct SYSTEM_INFO {
        wProcessorArchitecture: u16,
        wReserved: u16,
        dwPageSize: u32,
        lpMinimumApplicationAddress: *mut u8,
        lpMaximumApplicationAddress: *mut u8,
        dwActiveProcessorMask: *mut u8,
        dwNumberOfProcessors: u32,
        dwProcessorType: u32,
        dwAllocationGranularity: u32,
        wProcessorLevel: u16,
        wProcessorRevision: u16,
    }
    extern "system" {
        fn GetSystemInfo(lpSystemInfo: *mut SYSTEM_INFO);
    }
    unsafe {
        // Fix: zero-initialize instead of `mem::uninitialized()`. Producing an
        // uninitialized value is undefined behavior; `GetSystemInfo` overwrites
        // every field anyway, so zeroing costs nothing and is sound.
        let mut sysinfo: SYSTEM_INFO = std::mem::zeroed();
        GetSystemInfo(&mut sysinfo);
        sysinfo.dwNumberOfProcessors as usize
    }
}
#[cfg(any(target_os = "freebsd",
          target_os = "dragonfly",
          target_os = "bitrig",
          target_os = "netbsd"))]
/// Logical-CPU count on the BSDs: sysconf first, sysctl as a fallback.
fn get_num_cpus() -> usize {
    // Fix: keep sysconf's signed return value so an error (-1) is detected.
    // The previous `as libc::c_uint` cast turned -1 into a huge CPU count,
    // which skipped the sysctl fallback and was returned as-is.
    let onln = unsafe { libc::sysconf(libc::_SC_NPROCESSORS_ONLN) };
    let mut cpus: libc::c_uint = if onln < 1 { 0 } else { onln as libc::c_uint };
    let mut cpus_size = std::mem::size_of_val(&cpus);
    if cpus < 1 {
        // Fallback: sysctl([CTL_HW, HW_NCPU]).
        let mut mib = [libc::CTL_HW, libc::HW_NCPU, 0, 0];
        unsafe {
            libc::sysctl(mib.as_mut_ptr(),
                         2,
                         &mut cpus as *mut _ as *mut _,
                         &mut cpus_size as *mut _ as *mut _,
                         0 as *mut _,
                         0);
        }
        // Last resort: report a single CPU.
        if cpus < 1 {
            cpus = 1;
        }
    }
    cpus as usize
}
#[cfg(target_os = "openbsd")]
fn get_num_cpus() -> usize {
    let mut cpus: libc::c_uint = 0;
    let mut cpus_size = std::mem::size_of_val(&cpus);
    // Query sysctl([CTL_HW, HW_NCPU]); on failure `cpus` stays 0.
    let mut mib = [libc::CTL_HW, libc::HW_NCPU, 0, 0];
    unsafe {
        libc::sysctl(mib.as_mut_ptr(),
                     2,
                     &mut cpus as *mut _ as *mut _,
                     &mut cpus_size as *mut _ as *mut _,
                     0 as *mut _,
                     0);
    }
    // Clamp to at least one CPU.
    if cpus < 1 {
        cpus = 1;
    }
    cpus as usize
}
#[cfg(target_os = "macos")]
fn get_num_physical_cpus() -> usize {
    use std::ffi::CStr;
    use std::ptr;
    let mut cpus: i32 = 0;
    let mut cpus_size = std::mem::size_of_val(&cpus);
    // "hw.physicalcpu" is the physical-core sysctl key on macOS.
    let sysctl_name = CStr::from_bytes_with_nul(b"hw.physicalcpu\0")
        .expect("byte literal is missing NUL");
    unsafe {
        // Non-zero return means sysctlbyname failed; fall back to logical count.
        if 0 != libc::sysctlbyname(sysctl_name.as_ptr(),
                                   &mut cpus as *mut _ as *mut _,
                                   &mut cpus_size as *mut _ as *mut _,
                                   ptr::null_mut(),
                                   0) {
            return get_num_cpus();
        }
    }
    cpus as usize
}
#[cfg(target_os = "linux")]
fn get_num_cpus() -> usize {
    // Prefer the scheduler affinity mask: it reflects restrictions imposed on
    // this process (e.g. taskset), not just the machine's CPU count.
    let mut set: libc::cpu_set_t = unsafe { std::mem::zeroed() };
    if unsafe { libc::sched_getaffinity(0, std::mem::size_of::<libc::cpu_set_t>(), &mut set) } == 0 {
        // Count the set bits in the affinity mask.
        let mut count: u32 = 0;
        for i in 0..libc::CPU_SETSIZE as usize {
            if unsafe { libc::CPU_ISSET(i, &set) } {
                count += 1
            }
        }
        count as usize
    } else {
        // Affinity query failed: use the online-processor count, clamped to 1.
        let cpus = unsafe { libc::sysconf(libc::_SC_NPROCESSORS_ONLN) };
        if cpus < 1 {
            1
        } else {
            cpus as usize
        }
    }
}
#[cfg(any(
    target_os = "nacl",
    target_os = "macos",
    target_os = "ios",
    target_os = "android",
    target_os = "solaris",
    target_os = "fuchsia")
)]
/// Logical-CPU count via POSIX `sysconf(_SC_NPROCESSORS_CONF)`, clamped to a
/// minimum of one (covers both a zero result and the -1 error return).
fn get_num_cpus() -> usize {
    let count = unsafe { libc::sysconf(libc::_SC_NPROCESSORS_CONF) };
    if count > 0 { count as usize } else { 1 }
}
#[cfg(any(target_os = "emscripten", target_os = "redox", target_os = "haiku"))]
fn get_num_cpus() -> usize {
    // No CPU-count API is used on these targets; report a single CPU.
    1
}
#[cfg(test)]
mod tests {
    // Reads an expected CPU count from the environment, letting CI pin the
    // exact value; `None` means no expectation is configured.
    fn env_var(name: &'static str) -> Option<usize> {
        ::std::env::var(name).ok().map(|val| val.parse().unwrap())
    }
    #[test]
    fn test_get() {
        let num = super::get();
        if let Some(n) = env_var("NUM_CPUS_TEST_GET") {
            assert_eq!(num, n);
        } else {
            // Sanity bounds only: positive and not absurdly large.
            assert!(num > 0);
            assert!(num < 236_451);
        }
    }
    #[test]
    fn test_get_physical() {
        let num = super::get_physical();
        if let Some(n) = env_var("NUM_CPUS_TEST_GET_PHYSICAL") {
            assert_eq!(num, n);
        } else {
            // Sanity bounds only: positive and not absurdly large.
            assert!(num > 0);
            assert!(num < 236_451);
        }
    }
    #[test]
    fn test_physical_less_logical() {
        // Physical cores should never exceed logical CPUs.
        let logical = super::get();
        let physical = super::get_physical();
        println!("physical: {:?}, logical: {:?}", physical, logical);
        assert!(physical <= logical);
    }
}
Make fix work only on ARM targets
//! # num_cpus
//!
//! A crate with utilities to determine the number of CPUs available on the
//! current system.
//!
//! ## Example
//!
//! ```
//! let cpus = num_cpus::get();
//! ```
#![cfg_attr(test, deny(warnings))]
#![deny(missing_docs)]
#![allow(non_snake_case)]
#[cfg(not(windows))]
extern crate libc;
/// Returns the number of available CPUs of the current system.
///
/// # Note
///
/// This will check sched affinity on Linux.
#[inline]
pub fn get() -> usize {
    // Dispatches to the platform-specific implementation selected by cfg.
    get_num_cpus()
}
/// Returns the number of physical cores of the current system.
///
/// If not possible on the particular architecture, returns same as `get()`
/// which is the logical CPUs.
#[inline]
pub fn get_physical() -> usize {
    // Dispatches to the platform-specific implementation selected by cfg.
    get_num_physical_cpus()
}
#[cfg(not(any(target_os = "linux", target_os = "windows", target_os="macos")))]
#[inline]
fn get_num_physical_cpus() -> usize {
    // Not implemented, fallback
    // Physical-core detection is only implemented for Linux/Windows/macOS;
    // elsewhere the logical count is the best available answer.
    get_num_cpus()
}
#[cfg(target_os = "windows")]
/// Physical-core count on Windows, falling back to the logical count when the
/// Win32 query cannot produce an answer.
fn get_num_physical_cpus() -> usize {
    get_num_physical_cpus_windows().unwrap_or_else(get_num_cpus)
}
#[cfg(target_os = "windows")]
fn get_num_physical_cpus_windows() -> Option<usize> {
    // Inspired by https://msdn.microsoft.com/en-us/library/ms683194
    use std::ptr;
    use std::mem;
    #[allow(non_upper_case_globals)]
    const RelationProcessorCore: u32 = 0;
    // Minimal mirror of the Win32 SYSTEM_LOGICAL_PROCESSOR_INFORMATION layout.
    #[repr(C)]
    #[allow(non_camel_case_types)]
    struct SYSTEM_LOGICAL_PROCESSOR_INFORMATION {
        mask: usize,
        relationship: u32,
        _unused: [u64; 2]
    }
    extern "system" {
        fn GetLogicalProcessorInformation(
            info: *mut SYSTEM_LOGICAL_PROCESSOR_INFORMATION,
            length: &mut u32
        ) -> u32;
    }
    // First we need to determine how much space to reserve.
    // The required size of the buffer, in bytes.
    let mut needed_size = 0;
    unsafe {
        GetLogicalProcessorInformation(ptr::null_mut(), &mut needed_size);
    }
    let struct_size = mem::size_of::<SYSTEM_LOGICAL_PROCESSOR_INFORMATION>() as u32;
    // Could be 0, or some other bogus size.
    if needed_size == 0 || needed_size < struct_size || needed_size % struct_size != 0 {
        return None;
    }
    let count = needed_size / struct_size;
    // Allocate some memory where we will store the processor info.
    let mut buf = Vec::with_capacity(count as usize);
    let result;
    unsafe {
        result = GetLogicalProcessorInformation(buf.as_mut_ptr(), &mut needed_size);
    }
    // Failed for any reason.
    if result == 0 {
        return None;
    }
    // Recompute: the second call may have updated `needed_size`.
    let count = needed_size / struct_size;
    unsafe {
        // The successful call above initialized `count` entries in `buf`.
        buf.set_len(count as usize);
    }
    let phys_proc_count = buf.iter()
        // Only interested in processor packages (physical processors.)
        // NOTE(review): RelationProcessorCore (0) actually selects physical
        // *cores*, not packages — the filter is right, the wording above isn't.
        .filter(|proc_info| proc_info.relationship == RelationProcessorCore)
        .count();
    if phys_proc_count == 0 {
        None
    } else {
        Some(phys_proc_count)
    }
}
#[cfg(target_os = "linux")]
fn get_num_physical_cpus() -> usize {
    use std::io::BufReader;
    use std::io::BufRead;
    use std::fs::File;
    use std::collections::HashSet;
    // Count distinct (physical id, core id) pairs in /proc/cpuinfo; each pair
    // names one physical core. Any failure falls back to the logical count.
    let file = match File::open("/proc/cpuinfo") {
        Ok(val) => val,
        Err(_) => {return get_num_cpus()},
    };
    let reader = BufReader::new(file);
    let mut set = HashSet::new();
    let mut coreid: u32 = 0;
    let mut physid: u32 = 0;
    // Number of id fields seen since the last recorded pair; a pair is stored
    // once two id lines have arrived.
    // NOTE(review): this assumes "core id" and "physical id" alternate per
    // processor entry — two consecutive "core id" lines would also trigger it.
    let mut chgcount = 0;
    for line in reader.lines().filter_map(|result| result.ok()) {
        let parts: Vec<&str> = line.split(':').map(|s| s.trim()).collect();
        if parts.len() != 2 {
            continue
        }
        if parts[0] == "core id" || parts[0] == "physical id" {
            let value = match parts[1].trim().parse() {
                Ok(val) => val,
                Err(_) => break,
            };
            match parts[0] {
                "core id" => coreid = value,
                "physical id" => physid = value,
                _ => {},
            }
            chgcount += 1;
        }
        if chgcount == 2 {
            set.insert((physid, coreid));
            chgcount = 0;
        }
    }
    let count = set.len();
    // No id fields at all (some kernels omit them): use the logical count.
    if count == 0 { get_num_cpus() } else { count }
}
#[cfg(windows)]
/// Logical-CPU count on Windows, via `GetSystemInfo`.
fn get_num_cpus() -> usize {
    // Minimal mirror of the Win32 SYSTEM_INFO layout.
    #[repr(C)]
    struct SYSTEM_INFO {
        wProcessorArchitecture: u16,
        wReserved: u16,
        dwPageSize: u32,
        lpMinimumApplicationAddress: *mut u8,
        lpMaximumApplicationAddress: *mut u8,
        dwActiveProcessorMask: *mut u8,
        dwNumberOfProcessors: u32,
        dwProcessorType: u32,
        dwAllocationGranularity: u32,
        wProcessorLevel: u16,
        wProcessorRevision: u16,
    }
    extern "system" {
        fn GetSystemInfo(lpSystemInfo: *mut SYSTEM_INFO);
    }
    unsafe {
        // Fix: zero-initialize instead of `mem::uninitialized()`. Producing an
        // uninitialized value is undefined behavior; `GetSystemInfo` overwrites
        // every field anyway, so zeroing costs nothing and is sound.
        let mut sysinfo: SYSTEM_INFO = std::mem::zeroed();
        GetSystemInfo(&mut sysinfo);
        sysinfo.dwNumberOfProcessors as usize
    }
}
#[cfg(any(target_os = "freebsd",
          target_os = "dragonfly",
          target_os = "bitrig",
          target_os = "netbsd"))]
/// Logical-CPU count on the BSDs: sysconf first, sysctl as a fallback.
fn get_num_cpus() -> usize {
    // Fix: keep sysconf's signed return value so an error (-1) is detected.
    // The previous `as libc::c_uint` cast turned -1 into a huge CPU count,
    // which skipped the sysctl fallback and was returned as-is.
    let onln = unsafe { libc::sysconf(libc::_SC_NPROCESSORS_ONLN) };
    let mut cpus: libc::c_uint = if onln < 1 { 0 } else { onln as libc::c_uint };
    let mut cpus_size = std::mem::size_of_val(&cpus);
    if cpus < 1 {
        // Fallback: sysctl([CTL_HW, HW_NCPU]).
        let mut mib = [libc::CTL_HW, libc::HW_NCPU, 0, 0];
        unsafe {
            libc::sysctl(mib.as_mut_ptr(),
                         2,
                         &mut cpus as *mut _ as *mut _,
                         &mut cpus_size as *mut _ as *mut _,
                         0 as *mut _,
                         0);
        }
        // Last resort: report a single CPU.
        if cpus < 1 {
            cpus = 1;
        }
    }
    cpus as usize
}
#[cfg(target_os = "openbsd")]
fn get_num_cpus() -> usize {
    let mut cpus: libc::c_uint = 0;
    let mut cpus_size = std::mem::size_of_val(&cpus);
    // Query sysctl([CTL_HW, HW_NCPU]); on failure `cpus` stays 0.
    let mut mib = [libc::CTL_HW, libc::HW_NCPU, 0, 0];
    unsafe {
        libc::sysctl(mib.as_mut_ptr(),
                     2,
                     &mut cpus as *mut _ as *mut _,
                     &mut cpus_size as *mut _ as *mut _,
                     0 as *mut _,
                     0);
    }
    // Clamp to at least one CPU.
    if cpus < 1 {
        cpus = 1;
    }
    cpus as usize
}
#[cfg(target_os = "macos")]
fn get_num_physical_cpus() -> usize {
    use std::ffi::CStr;
    use std::ptr;
    let mut cpus: i32 = 0;
    let mut cpus_size = std::mem::size_of_val(&cpus);
    // "hw.physicalcpu" is the physical-core sysctl key on macOS.
    let sysctl_name = CStr::from_bytes_with_nul(b"hw.physicalcpu\0")
        .expect("byte literal is missing NUL");
    unsafe {
        // Non-zero return means sysctlbyname failed; fall back to logical count.
        if 0 != libc::sysctlbyname(sysctl_name.as_ptr(),
                                   &mut cpus as *mut _ as *mut _,
                                   &mut cpus_size as *mut _ as *mut _,
                                   ptr::null_mut(),
                                   0) {
            return get_num_cpus();
        }
    }
    cpus as usize
}
#[cfg(target_os = "linux")]
fn get_num_cpus() -> usize {
    // Prefer the scheduler affinity mask: it reflects restrictions imposed on
    // this process (e.g. taskset), not just the machine's CPU count.
    let mut set: libc::cpu_set_t = unsafe { std::mem::zeroed() };
    if unsafe { libc::sched_getaffinity(0, std::mem::size_of::<libc::cpu_set_t>(), &mut set) } == 0 {
        // Count the set bits in the affinity mask.
        let mut count: u32 = 0;
        for i in 0..libc::CPU_SETSIZE as usize {
            if unsafe { libc::CPU_ISSET(i, &set) } {
                count += 1
            }
        }
        count as usize
    } else {
        // Affinity query failed: use the online-processor count, clamped to 1.
        let cpus = unsafe { libc::sysconf(libc::_SC_NPROCESSORS_ONLN) };
        if cpus < 1 {
            1
        } else {
            cpus as usize
        }
    }
}
#[cfg(any(
    target_os = "nacl",
    target_os = "macos",
    target_os = "ios",
    target_os = "android",
    target_os = "solaris",
    target_os = "fuchsia")
)]
/// Logical-CPU count via POSIX `sysconf`, clamped to a minimum of one.
fn get_num_cpus() -> usize {
    // On ARM targets, processors could be turned off to save power.
    // Use `_SC_NPROCESSORS_CONF` to get the real number.
    //
    // Fix: the sysconf constants live in the `libc` crate; the bare names
    // `_SC_NPROCESSORS_CONF` / `_SC_NPROCESSORS_ONLN` do not resolve here
    // because this file never does `use libc::*`.
    #[cfg(any(target_arch = "arm", target_arch = "aarch64"))]
    const CONF_NAME: libc::c_int = libc::_SC_NPROCESSORS_CONF;
    #[cfg(not(any(target_arch = "arm", target_arch = "aarch64")))]
    const CONF_NAME: libc::c_int = libc::_SC_NPROCESSORS_ONLN;
    let cpus = unsafe { libc::sysconf(CONF_NAME) };
    if cpus < 1 {
        1
    } else {
        cpus as usize
    }
}
#[cfg(any(target_os = "emscripten", target_os = "redox", target_os = "haiku"))]
fn get_num_cpus() -> usize {
    // No CPU-count API is used on these targets; report a single CPU.
    1
}
#[cfg(test)]
mod tests {
    // Reads an expected CPU count from the environment, letting CI pin the
    // exact value; `None` means no expectation is configured.
    fn env_var(name: &'static str) -> Option<usize> {
        ::std::env::var(name).ok().map(|val| val.parse().unwrap())
    }
    #[test]
    fn test_get() {
        let num = super::get();
        if let Some(n) = env_var("NUM_CPUS_TEST_GET") {
            assert_eq!(num, n);
        } else {
            // Sanity bounds only: positive and not absurdly large.
            assert!(num > 0);
            assert!(num < 236_451);
        }
    }
    #[test]
    fn test_get_physical() {
        let num = super::get_physical();
        if let Some(n) = env_var("NUM_CPUS_TEST_GET_PHYSICAL") {
            assert_eq!(num, n);
        } else {
            // Sanity bounds only: positive and not absurdly large.
            assert!(num > 0);
            assert!(num < 236_451);
        }
    }
    #[test]
    fn test_physical_less_logical() {
        // Physical cores should never exceed logical CPUs.
        let logical = super::get();
        let physical = super::get_physical();
        println!("physical: {:?}, logical: {:?}", physical, logical);
        assert!(physical <= logical);
    }
}
|
/*!
A basic library for finding primes, using the Sieve of Eratosthenes. This library provides methods
for generating primes, testing whether a number is prime, and factorizing numbers. Most methods
generate primes lazily, so only enough primes will be generated for the given test, and primes are
cached for later use.
To use, see `PrimeSet`, a class which handles the Sieve and has multiple methods for iterating
over primes.
This also provides a few functions unconnected to `PrimeSet`, which will be faster for the first case,
but slower in the long term as they do not use any caching of primes.
*/
// Crate-level lint configuration. Fix: as outer `#[warn(...)]` attributes
// these attached only to the single `extern crate test` item below; the
// inner `#![warn(...)]` form applies them to the whole crate as intended.
#![warn(non_camel_case_types)]
#![warn(non_snake_case)]
#![warn(unused_qualifications)]
#![warn(non_upper_case_globals)]
#![warn(missing_docs)]

// The `test` crate (for #[bench]) is only needed when building tests.
#[cfg(test)]
extern crate test;
use std::ops::Index;
use std::slice;
use std::iter;
use std::num::Float;
#[cfg(test)]
use test::Bencher;
/// Integer square root, rounded down. Goes through `f64`, so it is exact
/// only while the argument fits in the 53-bit mantissa — TODO confirm
/// callers stay below that range.
fn sqrt_floor(n : uint) -> uint {
    (n as f64).sqrt().floor() as uint
}
/** A prime generator, using the Sieve of Eratosthenes.

Create with `let mut pset = PrimeSet::new()`, and then use `pset.iter()` to iterate over all primes.
**/
pub struct PrimeSet {
    // Primes found so far, in increasing order; never empty (seeded with 2, 3).
    lst : Vec<uint>
}

/// An iterator over generated primes. Created by PrimeSet::iter or
/// PrimeSet::generator
pub struct PrimeSetIter<'a> {
    p : &'a mut PrimeSet,
    // Index into `p.lst` of the next prime to yield.
    n : uint,
    // Whether to keep growing the sieve (`true`) or stop at the cached list.
    expand : bool
}
impl PrimeSet {
/// A new prime generator, primed with 2 and 3
pub fn new() -> PrimeSet {
PrimeSet{lst:vec!(2,3)}
}
/// Finds one more prime, and adds it to the list
pub fn expand(&mut self) {
let mut l = self.lst[self.lst.len()-1] + 2;
let mut sql = sqrt_floor(l);
let mut remainder = 0;
loop {
for &n in self.lst.iter() {
remainder = l % n;
if remainder == 0 || n > sql {
break;
}
};
if remainder != 0 {
self.lst.push(l);
break;
};
l += 2;
sql = sqrt_floor(l);
}
}
/// Number of primes found so far
pub fn len(&self) -> uint {
self.lst.len()
}
/// Return all primes found so far as a slice
pub fn list<'a>(&'a self) -> &'a [uint] {
self.lst.as_slice()
}
/// Iterator over all primes not yet found
pub fn generator<'a>(&'a mut self) -> PrimeSetIter<'a> {
let myn = self.len();
PrimeSetIter{p:self, n:myn, expand:true}
}
/// Iterator over all primes, starting with 2. If you don't care about the "state" of the
/// PrimeSet, this is what you want!
pub fn iter<'a>(&'a mut self) -> PrimeSetIter<'a> {
PrimeSetIter{p:self, n:0, expand:true}
}
//~ pub fn iter_once(&'self mut self) -> PrimeSetIter<'self> {
//~ PrimeSetIter{p:self, n:0, expand:false}
//~ }
/// Iterator over just the primes found so far
pub fn iter_vec<'a>(&'a self) -> slice::Iter<'a, uint> {
self.lst.iter()
}
/// Find the next largest prime from a number
/// Returns (idx, prime)
/// Note that if n is prime, then the output will be (idx, n)
pub fn find(&mut self, n: uint) -> (uint, uint) {
while n > *(self.lst.last().unwrap_or(&0)){
self.expand();
}
self.find_vec(n).unwrap()
}
/// Check if a number is prime
/// Note that this only requires primes up to n.sqrt() to be generated, and will generate
/// them as necessary on its own.
pub fn is_prime(&mut self, n: uint) -> bool {
if n <= 1 {return false;}
if n == 2 {return true;} // otherwise we get 2 % 2 == 0!
for m in self.iter() {
if n % m == 0 {return false;};
if m*m > n {return true;};
}
panic!("This iterator should not be empty.");
}
/// Find the next largest prime from a number, if it is within the already-found list
/// Returns (idx, prime)
/// Note that if n is prime, then the output will be (idx, n)
pub fn find_vec(&self, n: uint) -> Option<(uint, uint)> {
if n > *(self.lst.last().unwrap_or(&0)){ return None;}
let mut base : uint = 0;
let mut lim : uint = self.len();
while lim != 0 {
let ix = base + (lim >> 1);
match self.lst[ix].cmp(&n) {
Equal => return Some((ix, self.lst[ix])),
Less => {
base = ix + 1;
lim -= 1;
}
Greater => ()
}
lim >>= 1;
}
return Some((base, self.lst[base]));
}
/// Get the nth prime, even if we haven't yet found it
pub fn get(&mut self, index : &uint) -> &uint {
for _ in range(0, (*index as int) + 1 - (self.lst.len() as int)){
self.expand();
}
self.lst.index(index)
}
/// Get the prime factors of a number, starting from 2, including repeats
pub fn prime_factors(&mut self, n: uint) -> Vec<uint> {
if n == 1 {return Vec::new();}
let mut curn = n;
let mut m = ((curn as f64).sqrt()).ceil() as uint;
let mut lst: Vec<uint> = Vec::new();
for p in self.iter() {
while curn % p == 0 {
lst.push(p);
curn /= p;
if curn == 1 {return lst;}
m = ((curn as f64).sqrt()).ceil() as uint;
}
if p > m {
lst.push(p);
return lst;
}
}
panic!("This should be unreachable.");
}
}
/// Index into the primes found so far (pre-1.0 two-parameter `Index` trait).
impl Index<uint, uint> for PrimeSet {
    fn index(&self, index: &uint) -> &uint {
        self.lst.index(index)
    }
}
// Pre-1.0 Iterator trait form (`Iterator<uint>` rather than an associated
// `Item` type).
impl<'a> Iterator<uint> for PrimeSetIter<'a> {
    fn next(&mut self) -> Option<uint> {
        // Grow the sieve until position `n` exists; a non-expanding iterator
        // terminates instead.
        while self.n >= self.p.len(){
            match self.expand {
                true => self.p.expand(),
                false => return None
            }
        }
        self.n += 1;
        let m = self.p.lst[self.n-1];
        Some(m)
    }
}
/// Find the first factor (other than 1) of a number
// Trial-divides by 2 and then by every odd number up to ceil(sqrt(x));
// returns `x` itself when no divisor is found (so primes return themselves).
fn firstfac(x: uint) -> uint {
    let m = ((x as f64).sqrt()).ceil() as uint;
    if x % 2 == 0 { return 2; };
    // `iter::range_step` is the pre-1.0 stepped-range helper.
    for n in iter::range_step(3, m + 1, 2) {
        if x % n == 0 { return n; };
    }
    return x;
}
/// Find all prime factors of a number
/// Does not use a PrimeSet, but simply counts upwards
pub fn factors(x: uint) -> Vec<uint> {
    if x <= 1 {return vec!()};
    let mut lst: Vec<uint> = Vec::new();
    let mut curn = x;
    loop {
        // Repeatedly strip the smallest factor until only a prime remains.
        let m = firstfac(curn);
        lst.push(m);
        if m == curn { break } else { curn /= m };
    }
    return lst
}

/// Find all unique prime factors of a number
pub fn factors_uniq(x: uint) -> Vec<uint> {
    let mut lst: Vec<uint> = Vec::new();
    let mut curn = x;
    loop {
        let m = firstfac(curn);
        lst.push(m);
        if curn == m { break ; }
        // Strip every power of m before moving to the next distinct factor.
        while curn % m == 0 { curn /= m; }
        if curn == 1 { break ; }
    }
    return lst
}
/// Test whether a number is prime. Checks every odd number up to sqrt(n).
pub fn is_prime(n : uint) -> bool {
    if n <= 1 {return false;}
    // A number is prime exactly when its first nontrivial factor is itself.
    firstfac(n) == n
}
#[test]
fn test_iter(){
    let mut pset = PrimeSet::new();
    // The iterator must produce the primes in order from the very start.
    let first_few = [2u,3,5,7,11,13,17,19,23];
    for (m, &n) in pset.iter().zip(first_few.iter()) {
        assert_eq!(m, n);
    }
}

#[test]
fn test_primes(){
    let mut pset = PrimeSet::new();
    // note: some are repeated, because the pset list grows as it goes
    assert!(!pset.is_prime(1));
    assert!(!is_prime(1));
    assert!(pset.is_prime(2));
    assert!(is_prime(2));
    assert!(pset.is_prime(13));
    assert!(is_prime(13));
    assert!(!pset.is_prime(45));
    assert!(!is_prime(45));
    assert!(!pset.is_prime(13*13));
    assert!(!is_prime(13*13));
    assert!(pset.is_prime(13));
    assert!(pset.is_prime(7));
    assert!(is_prime(7));
    assert!(!pset.is_prime(9));
    assert!(!is_prime(9));
    assert!(pset.is_prime(5));
    assert!(is_prime(5));
}

#[test]
fn test_factors(){
    let mut pset = PrimeSet::new();
    // Each pair is (input, expected prime factorization with repeats).
    let ns = [ (1, vec!()),
               (2, vec!(2)),
               (3, vec!(3)),
               (4, vec!(2,2)),
               (5, vec!(5)),
               (6, vec!(2,3)),
               (9, vec!(3,3)),
               (12, vec!(2,2,3)),
               (121, vec!(11,11)),
               (144, vec!(2,2,2,2,3,3)),
               (10_000_000, vec!(2,2,2,2,2,2,2,5,5,5,5,5,5,5)),
               (100, vec!(2,2,5,5)),
               (121, vec!(11, 11)),
    ];
    for &(n, ref v) in ns.iter(){
        assert_eq!(pset.prime_factors(n), *v);
        assert_eq!(factors(n), *v);
    }
    // A fresh set must give the same answer as a warmed-up one.
    pset = PrimeSet::new();
    assert_eq!(pset.prime_factors(12), vec!(2,2,3));
}

#[bench]
fn bench_primes(b : &mut Bencher){
    b.iter(|| {
        let mut pset = PrimeSet::new();
        let (_, _) = pset.find(1_000_000);
        //~ let (idx, n) = pset.find(1_000_000);
        //~ println!("Prime {}: {}", idx, n);
    })
}
Updated to Rust 1.0-alpha
/*!
A basic library for finding primes, using the Sieve of Eratosthenes. This library provides methods
for generating primes, testing whether a number is prime, and factorizing numbers. Most methods
generate primes lazily, so only enough primes will be generated for the given test, and primes are
cached for later use.
To use, see `PrimeSet`, a class which handles the Sieve and has multiple methods for iterating
over primes.
This also provides a few functions unconnected to `PrimeSet`, which will be faster for the first case,
but slower in the long term as they do not use any caching of primes.
*/
// Crate-level lint configuration. Fix: as outer `#[warn(...)]` attributes
// these attached only to the single `extern crate test` item below; the
// inner `#![warn(...)]` form applies them to the whole crate as intended.
#![warn(non_camel_case_types)]
#![warn(non_snake_case)]
#![warn(unused_qualifications)]
#![warn(non_upper_case_globals)]
#![warn(missing_docs)]

// The `test` crate (for #[bench]) is only needed when building tests.
#[cfg(test)]
extern crate test;
use std::ops::Index;
use std::slice;
use std::iter;
use std::num::{Float,cast};
use std::cmp::Ordering::{Equal,Less,Greater};
#[cfg(test)]
use test::Bencher;
/// Integer square root rounded down, via `f64` (exact only while the value
/// fits in the 53-bit mantissa — TODO confirm callers stay in range).
fn sqrt_floor<T: std::num::NumCast>(n : T) -> T {
    cast::<f64, T>(
        (cast::<T, f64>(n).unwrap()).sqrt().floor()
    ).unwrap()
}

/// Integer square root rounded up, via `f64` (same caveat as `sqrt_floor`).
fn sqrt_ceil<T: std::num::NumCast>(n : T) -> T {
    cast::<f64, T>(
        (cast::<T, f64>(n).unwrap()).sqrt().ceil()
    ).unwrap()
}
/** A prime generator, using the Sieve of Eratosthenes.

Create with `let mut pset = PrimeSet::new()`, and then use `pset.iter()` to iterate over all primes.
**/
pub struct PrimeSet {
    // Primes found so far, in increasing order; never empty (seeded with 2, 3).
    lst : Vec<u64>
}

/// An iterator over generated primes. Created by PrimeSet::iter or
/// PrimeSet::generator
pub struct PrimeSetIter<'a> {
    p : &'a mut PrimeSet,
    // Index into `p.lst` of the next prime to yield.
    n : usize,
    // Whether to keep growing the sieve (`true`) or stop at the cached list.
    expand : bool
}
impl PrimeSet {
    /// A new prime generator, primed with 2 and 3
    pub fn new() -> PrimeSet {
        PrimeSet{lst:vec!(2,3)}
    }

    /// Finds one more prime, and adds it to the list
    pub fn expand(&mut self) {
        // Start at the next odd candidate after the largest known prime.
        let mut l : u64 = self.lst[self.lst.len()-1] + 2;
        let mut sql = sqrt_floor(l);
        let mut remainder = 0;
        loop {
            // Trial-divide by known primes: remainder == 0 means composite;
            // n > sqrt(l) means no divisor can exist, so l is prime.
            for &n in self.lst.iter() {
                remainder = l % n;
                if remainder == 0 || n > sql {
                    break;
                }
            };
            if remainder != 0 {
                self.lst.push(l);
                break;
            };
            // Composite: advance to the next odd number.
            l += 2;
            sql = sqrt_floor(l);
        }
    }

    /// Number of primes found so far
    pub fn len(&self) -> usize {
        self.lst.len()
    }

    /// Return all primes found so far as a slice
    pub fn list<'a>(&'a self) -> &'a [u64] {
        self.lst.as_slice()
    }

    /// Iterator over all primes not yet found
    pub fn generator<'a>(&'a mut self) -> PrimeSetIter<'a> {
        let myn = self.len();
        PrimeSetIter{p:self, n:myn, expand:true}
    }

    /// Iterator over all primes, starting with 2. If you don't care about the "state" of the
    /// PrimeSet, this is what you want!
    pub fn iter<'a>(&'a mut self) -> PrimeSetIter<'a> {
        PrimeSetIter{p:self, n:0, expand:true}
    }

    //~ pub fn iter_once(&'self mut self) -> PrimeSetIter<'self> {
    //~     PrimeSetIter{p:self, n:0, expand:false}
    //~ }

    /// Iterator over just the primes found so far
    pub fn iter_vec<'a>(&'a self) -> slice::Iter<'a, u64> {
        self.lst.iter()
    }

    /// Find the next largest prime from a number
    /// Returns (idx, prime)
    /// Note that if n is prime, then the output will be (idx, n)
    pub fn find(&mut self, n: u64) -> (usize, u64) {
        // Grow the sieve until it covers n, then look n up in the cache.
        while n > *(self.lst.last().unwrap_or(&0)){
            self.expand();
        }
        self.find_vec(n).unwrap()
    }

    /// Check if a number is prime
    /// Note that this only requires primes up to n.sqrt() to be generated, and will generate
    /// them as necessary on its own.
    pub fn is_prime(&mut self, n: u64) -> bool {
        if n <= 1 {return false;}
        if n == 2 {return true;} // otherwise we get 2 % 2 == 0!
        for m in self.iter() {
            if n % m == 0 {return false;};
            if m*m > n {return true;};
        }
        panic!("This iterator should not be empty.");
    }

    /// Find the next largest prime from a number, if it is within the already-found list
    /// Returns (idx, prime)
    /// Note that if n is prime, then the output will be (idx, n)
    pub fn find_vec(&self, n: u64) -> Option<(usize, u64)> {
        if n > *(self.lst.last().unwrap_or(&0)){ return None;}
        let mut base : usize = 0;
        let mut lim : usize = self.len();
        // Binary search algorithm: find the smallest stored prime >= n.
        while lim != 0 {
            let ix = base + (lim >> 1);
            match self.lst[ix].cmp(&n) {
                Equal => return Some((ix, self.lst[ix])),
                Less => {
                    base = ix + 1;
                    lim -= 1;
                }
                Greater => ()
            }
            lim >>= 1;
        }
        return Some((base, self.lst[base]));
    }

    /// Get the nth prime, even if we haven't yet found it
    pub fn get(&mut self, index : &usize) -> &u64 {
        // Fix: the pre-1.0 `range(0, n)` helper no longer exists; `0..n`
        // is equivalent (and empty when the bound is non-positive).
        for _ in 0..((*index as isize) + 1 - (self.lst.len() as isize)){
            self.expand();
        }
        self.lst.index(index)
    }

    /// Get the prime factors of a number, starting from 2, including repeats
    pub fn prime_factors(&mut self, n: u64) -> Vec<u64> {
        if n == 1 {return Vec::new();}
        let mut curn = n;
        let mut m = sqrt_ceil(curn);
        let mut lst: Vec<u64> = Vec::new();
        for p in self.iter() {
            while curn % p == 0 {
                lst.push(p);
                curn /= p;
                if curn == 1 {return lst;}
                m = sqrt_ceil(curn);
            }
            // Once p exceeds sqrt(curn), the remaining cofactor is prime.
            // Fix: push the cofactor `curn`, not the probe prime `p`
            // (previously e.g. 14 factored as [2, 5] instead of [2, 7]).
            if p > m {
                lst.push(curn);
                return lst;
            }
        }
        panic!("This should be unreachable.");
    }
}
/// Index into the primes found so far.
// NOTE(review): `index` takes `&usize` — the 1.0-alpha `Index` trait
// signature; the stabilised trait takes the index by value.
impl Index<usize> for PrimeSet {
    type Output = u64;
    fn index(&self, index: &usize) -> &u64 {
        self.lst.index(index)
    }
}
impl<'a> Iterator for PrimeSetIter<'a> {
    type Item = u64;

    /// Yield the next prime, expanding the underlying sieve on demand.
    fn next(&mut self) -> Option<u64> {
        // Grow the prime list until position `n` exists; an iterator that
        // is not allowed to expand terminates instead.
        while self.n >= self.p.len() {
            if self.expand {
                self.p.expand();
            } else {
                return None;
            }
        }
        let prime = self.p.lst[self.n];
        self.n += 1;
        Some(prime)
    }
}
/// Find the first factor (other than 1) of a number.
///
/// Trial-divides by 2 and then by every odd number up to `ceil(sqrt(x))`;
/// when no divisor is found, `x` itself is returned (so primes return
/// themselves). `firstfac(1)` returns 1 and `firstfac(0)` returns 2.
fn firstfac(x: u64) -> u64 {
    let m = (x as f64).sqrt().ceil() as u64;
    if x % 2 == 0 { return 2; };
    // Fix: `iter::range_step` was never stabilised and no longer exists;
    // the stable `step_by` adaptor yields the same 3, 5, 7, ... sequence.
    for n in (3..m + 1).step_by(2) {
        if x % n == 0 { return n; };
    }
    x
}
/// Find all prime factors of a number
/// Does not use a PrimeSet, but simply counts upwards
pub fn factors(x: u64) -> Vec<u64> {
    let mut primes = Vec::new();
    if x <= 1 {
        return primes;
    }
    let mut remaining = x;
    loop {
        // Peel off the smallest factor until only a prime remains.
        let p = firstfac(remaining);
        primes.push(p);
        if p == remaining {
            break;
        }
        remaining /= p;
    }
    primes
}
/// Find all unique prime factors of a number
pub fn factors_uniq(x: u64) -> Vec<u64> {
    let mut distinct = Vec::new();
    let mut remaining = x;
    loop {
        let p = firstfac(remaining);
        distinct.push(p);
        if remaining == p {
            break;
        }
        // Strip every power of p before looking for the next distinct factor.
        while remaining % p == 0 {
            remaining /= p;
        }
        if remaining == 1 {
            break;
        }
    }
    distinct
}
/// Test whether a number is prime. Checks every odd number up to sqrt(n).
pub fn is_prime(n : u64) -> bool {
    // A number above 1 is prime exactly when its first factor is itself.
    n > 1 && firstfac(n) == n
}
#[test]
fn test_iter(){
    let mut pset = PrimeSet::new();
    // The iterator must produce the primes in order from the very start.
    let first_few = [2u64,3,5,7,11,13,17,19,23];
    for (m, &n) in pset.iter().zip(first_few.iter()) {
        assert_eq!(m, n);
    }
}

#[test]
fn test_primes(){
    let mut pset = PrimeSet::new();
    // note: some are repeated, because the pset list grows as it goes
    assert!(!pset.is_prime(1));
    assert!(!is_prime(1));
    assert!(pset.is_prime(2));
    assert!(is_prime(2));
    assert!(pset.is_prime(13));
    assert!(is_prime(13));
    assert!(!pset.is_prime(45));
    assert!(!is_prime(45));
    assert!(!pset.is_prime(13*13));
    assert!(!is_prime(13*13));
    assert!(pset.is_prime(13));
    assert!(pset.is_prime(7));
    assert!(is_prime(7));
    assert!(!pset.is_prime(9));
    assert!(!is_prime(9));
    assert!(pset.is_prime(5));
    assert!(is_prime(5));
}

#[test]
fn test_factors(){
    let mut pset = PrimeSet::new();
    // Each pair is (input, expected prime factorization with repeats).
    let ns = [ (1, vec!()),
               (2, vec!(2)),
               (3, vec!(3)),
               (4, vec!(2,2)),
               (5, vec!(5)),
               (6, vec!(2,3)),
               (9, vec!(3,3)),
               (12, vec!(2,2,3)),
               (121, vec!(11,11)),
               (144, vec!(2,2,2,2,3,3)),
               (10_000_000, vec!(2,2,2,2,2,2,2,5,5,5,5,5,5,5)),
               (100, vec!(2,2,5,5)),
               (121, vec!(11, 11)),
    ];
    for &(n, ref v) in ns.iter(){
        assert_eq!(pset.prime_factors(n), *v);
        assert_eq!(factors(n), *v);
    }
    // A fresh set must give the same answer as a warmed-up one.
    pset = PrimeSet::new();
    assert_eq!(pset.prime_factors(12), vec!(2,2,3));
}

#[bench]
fn bench_primes(b : &mut Bencher){
    b.iter(|| {
        let mut pset = PrimeSet::new();
        let (_, _) = pset.find(1_000_000);
        //~ let (idx, n) = pset.find(1_000_000);
        //~ println!("Prime {}: {}", idx, n);
    })
}
|
//! Setup and control loop devices.
//!
//! Provides rust interface with similar functionality to the Linux utility `losetup`.
//!
//! # Examples
//!
//! ```rust
//! use loopdev::LoopControl;
//! let lc = LoopControl::open().unwrap();
//! let ld = lc.next_free().unwrap();
//!
//! println!("{}", ld.path().unwrap().display());
//!
//! ld.attach_file("test.img").unwrap();
//! // ...
//! ld.detach().unwrap();
//! ```
extern crate libc;
use std::fs::File;
use std::fs::OpenOptions;
use libc::{c_int, ioctl};
use std::default::Default;
use std::io;
use std::os::unix::prelude::*;
use std::path::{Path, PathBuf};
// TODO support missing operations
// ioctl request numbers for the loop driver; values mirror <linux/loop.h>
// — TODO confirm against current kernel headers.
const LOOP_SET_FD: u16 = 0x4C00;
const LOOP_CLR_FD: u16 = 0x4C01;
const LOOP_SET_STATUS64: u16 = 0x4C04;
//const LOOP_GET_STATUS64: u16 = 0x4C05;
const LOOP_SET_CAPACITY: u16 = 0x4C07;
//const LOOP_SET_DIRECT_IO: u16 = 0x4C08;
//const LOOP_SET_BLOCK_SIZE: u16 = 0x4C09;
//const LOOP_CTL_ADD: u16 = 0x4C80;
//const LOOP_CTL_REMOVE: u16 = 0x4C81;
const LOOP_CTL_GET_FREE: u16 = 0x4C82;
// Device paths used to talk to the driver and to name loop devices.
const LOOP_CONTROL: &str = "/dev/loop-control";
const LOOP_PREFIX: &str = "/dev/loop";
/// Interface to the loop control device: `/dev/loop-control`.
#[derive(Debug)]
pub struct LoopControl {
    // Open handle on /dev/loop-control, used for the LOOP_CTL_* ioctls.
    dev_file: File,
}

impl LoopControl {
    /// Opens the loop control device.
    pub fn open() -> io::Result<Self> {
        Ok(Self {
            dev_file: try!(OpenOptions::new().read(true).write(true).open(LOOP_CONTROL)),
        })
    }

    /// Finds and opens the next available loop device.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use loopdev::LoopControl;
    /// let lc = LoopControl::open().unwrap();
    /// let ld = lc.next_free().unwrap();
    /// println!("{}", ld.path().unwrap().display());
    /// ```
    pub fn next_free(&self) -> io::Result<LoopDevice> {
        let result;
        unsafe {
            // LOOP_CTL_GET_FREE reports the index of the first unused loop
            // device; a negative return signals an error.
            result = ioctl(self.dev_file.as_raw_fd() as c_int, LOOP_CTL_GET_FREE.into());
        }
        if result < 0 {
            Err(io::Error::last_os_error())
        } else {
            Ok(try!(LoopDevice::open(&format!(
                "{}{}",
                LOOP_PREFIX, result
            ))))
        }
    }
}
/// Interface to a loop device ie `/dev/loop0`.
#[derive(Debug)]
pub struct LoopDevice {
    // Open handle on the /dev/loopN device node itself.
    device: File,
}

// Expose the underlying device fd so callers can issue their own syscalls.
impl AsRawFd for LoopDevice {
    fn as_raw_fd(&self) -> RawFd {
        self.device.as_raw_fd()
    }
}
impl LoopDevice {
    /// Opens a loop device.
    pub fn open<P: AsRef<Path>>(dev: P) -> io::Result<Self> {
        // TODO create dev if it does not exist and begins with LOOP_PREFIX
        let f = try!(OpenOptions::new().read(true).write(true).open(dev));
        Ok(Self { device: f })
    }

    /// Attach the loop device to a file starting at offset into the file.
    #[deprecated(
        since = "0.2.0",
        note = "use `attach_file` or `attach_with_offset` instead"
    )]
    pub fn attach<P: AsRef<Path>>(&self, backing_file: P, offset: u64) -> io::Result<()> {
        self.attach_with_sizelimit(backing_file, offset, 0)
    }

    /// Attach the loop device to a file that maps to the whole file.
    ///
    /// # Examples
    ///
    /// Attach the device to a file.
    ///
    /// ```rust
    /// use loopdev::LoopDevice;
    /// let ld = LoopDevice::open("/dev/loop4").unwrap();
    /// ld.attach_file("test.img").unwrap();
    /// # ld.detach().unwrap();
    /// ```
    pub fn attach_file<P: AsRef<Path>>(&self, backing_file: P) -> io::Result<()> {
        self.attach_with_sizelimit(backing_file, 0, 0)
    }

    /// Attach the loop device to a file starting at offset into the file.
    ///
    /// # Examples
    ///
    /// Attach the device to the start of a file.
    ///
    /// ```rust
    /// use loopdev::LoopDevice;
    /// let ld = LoopDevice::open("/dev/loop5").unwrap();
    /// ld.attach_with_offset("test.img", 0).unwrap();
    /// # ld.detach().unwrap();
    /// ```
    pub fn attach_with_offset<P: AsRef<Path>>(
        &self,
        backing_file: P,
        offset: u64,
    ) -> io::Result<()> {
        self.attach_with_sizelimit(backing_file, offset, 0)
    }

    /// Attach the loop device to a file starting at offset into the file and with the given
    /// sizelimit.
    ///
    /// # Examples
    ///
    /// Attach the device to the start of a file with a maximum size of 1024 bytes.
    ///
    /// ```rust
    /// use loopdev::LoopDevice;
    /// let ld = LoopDevice::open("/dev/loop6").unwrap();
    /// ld.attach_with_sizelimit("test.img", 0, 1024).unwrap();
    /// # ld.detach().unwrap();
    /// ```
    pub fn attach_with_sizelimit<P: AsRef<Path>>(
        &self,
        backing_file: P,
        offset: u64,
        sizelimit: u64,
    ) -> io::Result<()> {
        let bf = try!(OpenOptions::new().read(true).write(true).open(backing_file));
        // Attach the file
        unsafe {
            if ioctl(
                self.device.as_raw_fd() as c_int,
                LOOP_SET_FD.into(),
                bf.as_raw_fd() as c_int,
            ) < 0
            {
                return Err(io::Error::last_os_error());
            }
        }
        // Set offset for backing_file
        let mut info = loop_info64::default();
        info.lo_offset = offset;
        info.lo_sizelimit = sizelimit;
        unsafe {
            if ioctl(
                self.device.as_raw_fd() as c_int,
                LOOP_SET_STATUS64.into(),
                &mut info,
            ) < 0
            {
                // NOTE(review): `detach` issues another ioctl before
                // `last_os_error()` is read below, so the reported errno
                // may belong to the detach rather than LOOP_SET_STATUS64
                // — confirm and capture the error first.
                try!(self.detach());
                return Err(io::Error::last_os_error());
            }
        }
        Ok(())
    }

    /// Get the path of the loop device.
    #[deprecated(since = "0.2.0", note = "use `path` instead")]
    pub fn get_path(&self) -> Option<PathBuf> {
        self.path()
    }

    /// Get the path of the loop device.
    pub fn path(&self) -> Option<PathBuf> {
        // Resolve our own fd through /proc/self/fd to recover the path.
        let mut p = PathBuf::from("/proc/self/fd");
        p.push(self.device.as_raw_fd().to_string());
        std::fs::read_link(&p).ok()
    }

    /// Detach a loop device from its backing file.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use loopdev::LoopDevice;
    /// let ld = LoopDevice::open("/dev/loop7").unwrap();
    /// # ld.attach_file("test.img").unwrap();
    /// ld.detach().unwrap();
    /// ```
    pub fn detach(&self) -> io::Result<()> {
        unsafe {
            if ioctl(self.device.as_raw_fd() as c_int, LOOP_CLR_FD.into(), 0) < 0 {
                Err(io::Error::last_os_error())
            } else {
                Ok(())
            }
        }
    }

    /// Resize a live loop device. If the size of the backing file changes this can be called to
    /// inform the loop driver about the new size.
    pub fn set_capacity(&self) -> io::Result<()> {
        // NOTE(review): these `println!` calls look like leftover debug
        // output; a library probably should not write to stdout.
        println!("running set_capacity");
        unsafe {
            if ioctl(
                self.device.as_raw_fd() as c_int,
                LOOP_SET_CAPACITY.into(),
                0,
            ) < 0
            {
                Err(io::Error::last_os_error())
            } else {
                println!("ok");
                Ok(())
            }
        }
    }
}
/// In-memory mirror of the kernel's `struct loop_info64`, passed to the
/// `LOOP_SET_STATUS64` ioctl. `#[repr(C)]` keeps the field layout stable;
/// field order/sizes must match <linux/loop.h> — TODO confirm against
/// current kernel headers.
#[repr(C)]
struct loop_info64 {
    pub lo_device: u64,
    pub lo_inode: u64,
    pub lo_rdevice: u64,
    pub lo_offset: u64,
    pub lo_sizelimit: u64,
    pub lo_number: u32,
    pub lo_encrypt_type: u32,
    pub lo_encrypt_key_size: u32,
    pub lo_flags: u32,
    pub lo_file_name: [u8; 64],
    pub lo_crypt_name: [u8; 64],
    pub lo_encrypt_key: [u8; 32],
    pub lo_init: [u64; 2],
}

/// Zero-initialised template; callers set only the fields they need.
impl Default for loop_info64 {
    fn default() -> Self {
        Self {
            lo_device: 0,
            lo_inode: 0,
            lo_rdevice: 0,
            lo_offset: 0,
            lo_sizelimit: 0,
            lo_number: 0,
            lo_encrypt_type: 0,
            lo_encrypt_key_size: 0,
            lo_flags: 0,
            lo_file_name: [0; 64],
            lo_crypt_name: [0; 64],
            lo_encrypt_key: [0; 32],
            lo_init: [0; 2],
        }
    }
}
Converts try! to ?
//! Setup and control loop devices.
//!
//! Provides rust interface with similar functionality to the Linux utility `losetup`.
//!
//! # Examples
//!
//! ```rust
//! use loopdev::LoopControl;
//! let lc = LoopControl::open().unwrap();
//! let ld = lc.next_free().unwrap();
//!
//! println!("{}", ld.path().unwrap().display());
//!
//! ld.attach_file("test.img").unwrap();
//! // ...
//! ld.detach().unwrap();
//! ```
extern crate libc;
use std::fs::File;
use std::fs::OpenOptions;
use libc::{c_int, ioctl};
use std::default::Default;
use std::io;
use std::os::unix::prelude::*;
use std::path::{Path, PathBuf};
// TODO support missing operations
// ioctl request numbers for the loop driver; values mirror <linux/loop.h>
// — TODO confirm against current kernel headers.
const LOOP_SET_FD: u16 = 0x4C00;
const LOOP_CLR_FD: u16 = 0x4C01;
const LOOP_SET_STATUS64: u16 = 0x4C04;
//const LOOP_GET_STATUS64: u16 = 0x4C05;
const LOOP_SET_CAPACITY: u16 = 0x4C07;
//const LOOP_SET_DIRECT_IO: u16 = 0x4C08;
//const LOOP_SET_BLOCK_SIZE: u16 = 0x4C09;
//const LOOP_CTL_ADD: u16 = 0x4C80;
//const LOOP_CTL_REMOVE: u16 = 0x4C81;
const LOOP_CTL_GET_FREE: u16 = 0x4C82;
// Device paths used to talk to the driver and to name loop devices.
const LOOP_CONTROL: &str = "/dev/loop-control";
const LOOP_PREFIX: &str = "/dev/loop";
/// Interface to the loop control device: `/dev/loop-control`.
#[derive(Debug)]
pub struct LoopControl {
    // Open handle on /dev/loop-control, used for the LOOP_CTL_* ioctls.
    dev_file: File,
}
impl LoopControl {
    /// Opens the loop control device.
    pub fn open() -> io::Result<Self> {
        let dev_file = OpenOptions::new()
            .read(true)
            .write(true)
            .open(LOOP_CONTROL)?;
        Ok(Self { dev_file })
    }

    /// Finds and opens the next available loop device.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use loopdev::LoopControl;
    /// let lc = LoopControl::open().unwrap();
    /// let ld = lc.next_free().unwrap();
    /// println!("{}", ld.path().unwrap().display());
    /// ```
    pub fn next_free(&self) -> io::Result<LoopDevice> {
        // Ask the control device for the index of the first unused loop
        // device; a negative return signals an error.
        let result = unsafe {
            ioctl(self.dev_file.as_raw_fd() as c_int, LOOP_CTL_GET_FREE.into())
        };
        if result < 0 {
            Err(io::Error::last_os_error())
        } else {
            LoopDevice::open(&format!("{}{}", LOOP_PREFIX, result))
        }
    }
}
/// Interface to a loop device ie `/dev/loop0`.
#[derive(Debug)]
pub struct LoopDevice {
    // Open handle on the /dev/loopN device node itself.
    device: File,
}

// Expose the underlying device fd so callers can issue their own syscalls.
impl AsRawFd for LoopDevice {
    fn as_raw_fd(&self) -> RawFd {
        self.device.as_raw_fd()
    }
}
impl LoopDevice {
    /// Opens a loop device.
    pub fn open<P: AsRef<Path>>(dev: P) -> io::Result<Self> {
        // TODO create dev if it does not exist and begins with LOOP_PREFIX
        let f = OpenOptions::new().read(true).write(true).open(dev)?;
        Ok(Self { device: f })
    }

    /// Attach the loop device to a file starting at offset into the file.
    #[deprecated(
        since = "0.2.0",
        note = "use `attach_file` or `attach_with_offset` instead"
    )]
    pub fn attach<P: AsRef<Path>>(&self, backing_file: P, offset: u64) -> io::Result<()> {
        self.attach_with_sizelimit(backing_file, offset, 0)
    }

    /// Attach the loop device to a file that maps to the whole file.
    ///
    /// # Examples
    ///
    /// Attach the device to a file.
    ///
    /// ```rust
    /// use loopdev::LoopDevice;
    /// let ld = LoopDevice::open("/dev/loop4").unwrap();
    /// ld.attach_file("test.img").unwrap();
    /// # ld.detach().unwrap();
    /// ```
    pub fn attach_file<P: AsRef<Path>>(&self, backing_file: P) -> io::Result<()> {
        self.attach_with_sizelimit(backing_file, 0, 0)
    }

    /// Attach the loop device to a file starting at offset into the file.
    ///
    /// # Examples
    ///
    /// Attach the device to the start of a file.
    ///
    /// ```rust
    /// use loopdev::LoopDevice;
    /// let ld = LoopDevice::open("/dev/loop5").unwrap();
    /// ld.attach_with_offset("test.img", 0).unwrap();
    /// # ld.detach().unwrap();
    /// ```
    pub fn attach_with_offset<P: AsRef<Path>>(
        &self,
        backing_file: P,
        offset: u64,
    ) -> io::Result<()> {
        self.attach_with_sizelimit(backing_file, offset, 0)
    }

    /// Attach the loop device to a file starting at offset into the file and with the given
    /// sizelimit.
    ///
    /// # Examples
    ///
    /// Attach the device to the start of a file with a maximum size of 1024 bytes.
    ///
    /// ```rust
    /// use loopdev::LoopDevice;
    /// let ld = LoopDevice::open("/dev/loop6").unwrap();
    /// ld.attach_with_sizelimit("test.img", 0, 1024).unwrap();
    /// # ld.detach().unwrap();
    /// ```
    pub fn attach_with_sizelimit<P: AsRef<Path>>(
        &self,
        backing_file: P,
        offset: u64,
        sizelimit: u64,
    ) -> io::Result<()> {
        let bf = OpenOptions::new()
            .read(true)
            .write(true)
            .open(backing_file)?;
        // Attach the file
        unsafe {
            if ioctl(
                self.device.as_raw_fd() as c_int,
                LOOP_SET_FD.into(),
                bf.as_raw_fd() as c_int,
            ) < 0
            {
                return Err(io::Error::last_os_error());
            }
        }
        // Set offset and sizelimit for backing_file
        let mut info = loop_info64::default();
        info.lo_offset = offset;
        info.lo_sizelimit = sizelimit;
        unsafe {
            if ioctl(
                self.device.as_raw_fd() as c_int,
                LOOP_SET_STATUS64.into(),
                &mut info,
            ) < 0
            {
                // Fix: read the LOOP_SET_STATUS64 errno *before* detaching;
                // `detach` issues its own ioctl, which can clobber errno and
                // make `last_os_error` report the wrong failure.
                let status_err = io::Error::last_os_error();
                self.detach()?;
                return Err(status_err);
            }
        }
        Ok(())
    }

    /// Get the path of the loop device.
    #[deprecated(since = "0.2.0", note = "use `path` instead")]
    pub fn get_path(&self) -> Option<PathBuf> {
        self.path()
    }

    /// Get the path of the loop device.
    pub fn path(&self) -> Option<PathBuf> {
        // Resolve our own fd through /proc/self/fd to recover the device path.
        let mut p = PathBuf::from("/proc/self/fd");
        p.push(self.device.as_raw_fd().to_string());
        std::fs::read_link(&p).ok()
    }

    /// Detach a loop device from its backing file.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use loopdev::LoopDevice;
    /// let ld = LoopDevice::open("/dev/loop7").unwrap();
    /// # ld.attach_file("test.img").unwrap();
    /// ld.detach().unwrap();
    /// ```
    pub fn detach(&self) -> io::Result<()> {
        unsafe {
            if ioctl(self.device.as_raw_fd() as c_int, LOOP_CLR_FD.into(), 0) < 0 {
                Err(io::Error::last_os_error())
            } else {
                Ok(())
            }
        }
    }

    /// Resize a live loop device. If the size of the backing file changes this can be called to
    /// inform the loop driver about the new size.
    pub fn set_capacity(&self) -> io::Result<()> {
        // Fix: removed leftover debug `println!` calls; a library should not
        // write to stdout on its own.
        unsafe {
            if ioctl(
                self.device.as_raw_fd() as c_int,
                LOOP_SET_CAPACITY.into(),
                0,
            ) < 0
            {
                Err(io::Error::last_os_error())
            } else {
                Ok(())
            }
        }
    }
}
/// In-memory mirror of the kernel's `struct loop_info64`, passed to the
/// `LOOP_SET_STATUS64` ioctl. `#[repr(C)]` keeps the field layout stable;
/// field order/sizes must match <linux/loop.h> — TODO confirm against
/// current kernel headers.
#[repr(C)]
struct loop_info64 {
    pub lo_device: u64,
    pub lo_inode: u64,
    pub lo_rdevice: u64,
    pub lo_offset: u64,
    pub lo_sizelimit: u64,
    pub lo_number: u32,
    pub lo_encrypt_type: u32,
    pub lo_encrypt_key_size: u32,
    pub lo_flags: u32,
    pub lo_file_name: [u8; 64],
    pub lo_crypt_name: [u8; 64],
    pub lo_encrypt_key: [u8; 32],
    pub lo_init: [u64; 2],
}

/// Zero-initialised template; callers set only the fields they need.
impl Default for loop_info64 {
    fn default() -> Self {
        Self {
            lo_device: 0,
            lo_inode: 0,
            lo_rdevice: 0,
            lo_offset: 0,
            lo_sizelimit: 0,
            lo_number: 0,
            lo_encrypt_type: 0,
            lo_encrypt_key_size: 0,
            lo_flags: 0,
            lo_file_name: [0; 64],
            lo_crypt_name: [0; 64],
            lo_encrypt_key: [0; 32],
            lo_init: [0; 2],
        }
    }
}
|
//! rust-sessions
//!
//! This is an implementation of *session types* in Rust.
//!
//! The channels in Rusts standard library are useful for a great many things,
//! but they're restricted to a single type. Session types allows one to use a
//! single channel for transferring values of different types, depending on the
//! context in which it is used. Specifically, a session typed channel always
//! carry a *protocol*, which dictates how communication is to take place.
//!
//! For example, imagine that two threads, `A` and `B` want to communicate with
//! the following pattern:
//!
//! 1. `A` sends an integer to `B`.
//! 2. `B` sends a boolean to `A` depending on the integer received.
//!
//! With session types, this could be done by sharing a single channel. From
//! `A`'s point of view, it would have the type `int ! (bool ? eps)` where `t ! r`
//! is the protocol "send something of type `t` then proceed with
//! protocol `r`", the protocol `t ? r` is "receive something of type `t` then proceed
//! with protocol `r`, and `eps` is a special marker indicating the end of a
//! communication session.
//!
//! Our session type library allows the user to create channels that adhere to a
//! specified protocol. For example, a channel like the above would have the type
//! `Chan<(), Send<i64, Recv<bool, Eps>>>`, and the full program could look like this:
//!
//! ```
//! extern crate rust_sessions;
//! use rust_sessions::*;
//!
//! type Server = Recv<i64, Send<bool, Eps>>;
//! type Client = Send<i64, Recv<bool, Eps>>;
//!
//! fn srv(c: Chan<(), Server>) {
//! let (c, n) = c.recv();
//! if n % 2 == 0 {
//! c.send(true).close()
//! } else {
//! c.send(false).close()
//! }
//! }
//!
//! fn cli(c: Chan<(), Client>) {
//! let n = 42;
//! let c = c.send(n);
//! let (c, b) = c.recv();
//!
//! if b {
//! println!("{} is even", n);
//! } else {
//! println!("{} is odd", n);
//! }
//!
//! c.close();
//! }
//!
//! fn main() {
//! connect(srv, cli);
//! }
//! ```
#![feature(std_misc)]
#![feature(scoped)]
use std::marker;
use std::thread::scoped;
use std::mem::transmute;
use std::sync::mpsc::{Sender, Receiver, channel};
use std::collections::HashMap;
use std::sync::mpsc::Select;
use std::marker::PhantomData;
/// A session typed channel. `T` is the protocol and `E` is the environment,
/// containing potential recursion targets
pub struct Chan<E, T> (Sender<Box<u8>>, Receiver<Box<u8>>, PhantomData<(E, T)>);

/// Send a value of type `A` through the type-erased channel endpoint.
///
/// SAFETY(review): transmutes `&Sender<Box<u8>>` into `&Sender<Box<A>>`.
/// This relies on all `Sender<Box<_>>` sharing a layout and on the session
/// type discipline guaranteeing the peer reads back the same `A` — confirm.
fn unsafe_write_chan<A: marker::Send + 'static, E, T>
    (&Chan(ref tx, _, _): &Chan<E, T>, x: A)
{
    let tx: &Sender<Box<A>> = unsafe { transmute(tx) };
    tx.send(Box::new(x)).unwrap();
}

/// Receive a value of type `A` from the type-erased channel endpoint.
///
/// SAFETY(review): mirror of `unsafe_write_chan`; sound only if the sender
/// wrote a `Box<A>` of exactly this type — confirm.
fn unsafe_read_chan<A: marker::Send + 'static, E, T>
    (&Chan(_, ref rx, _): &Chan<E, T>) -> A
{
    let rx: &Receiver<Box<A>> = unsafe { transmute(rx) };
    *rx.recv().unwrap()
}
// Zero-sized marker types that encode protocols at the type level; they are
// never constructed with meaningful data, only used as type parameters.
/// Peano numbers: Zero
#[allow(missing_copy_implementations)]
pub struct Z;

/// Peano numbers: Plus one
pub struct S<P> ( PhantomData<P> );

/// End of communication session (epsilon)
#[allow(missing_copy_implementations)]
pub struct Eps;

/// Receive `A`, then `R`
pub struct Recv<A,R> ( PhantomData<(A, R)> );

/// Send `A`, then `R`
pub struct Send<A,R> ( PhantomData<(A, R)> );

/// Active choice between `R` and `S`
pub struct Choose<R,S> ( PhantomData<(R, S)> );

/// Passive choice (offer) between `R` and `S`
pub struct Offer<R,S> ( PhantomData<(R, S)> );

/// Enter a recursive environment
pub struct Rec<R> ( PhantomData<R> );

/// Recurse. V indicates how many layers of the recursive environment we recurse
/// out of.
pub struct Var<V> ( PhantomData<V> );
/// Maps a protocol to its dual: the protocol seen from the other endpoint
/// (send pairs with receive, choose pairs with offer).
///
/// SAFETY(review): marked `unsafe` presumably because the transmute-based
/// channel operations rely on duality being implemented correctly — confirm.
pub unsafe trait HasDual {
    type Dual;
}

unsafe impl HasDual for Eps {
    type Dual = Eps;
}

unsafe impl <A, T: HasDual> HasDual for Send<A, T> {
    type Dual = Recv<A, T::Dual>;
}

unsafe impl <A, T: HasDual> HasDual for Recv<A, T> {
    type Dual = Send<A, T::Dual>;
}

unsafe impl <R: HasDual, S: HasDual> HasDual for Choose<R, S> {
    type Dual = Offer<R::Dual, S::Dual>;
}

unsafe impl <R: HasDual, S: HasDual> HasDual for Offer<R, S> {
    type Dual = Choose<R::Dual, S::Dual>;
}

// Recursion variables are their own dual: both endpoints loop together.
unsafe impl HasDual for Var<Z> {
    type Dual = Var<Z>;
}

unsafe impl <N> HasDual for Var<S<N>> {
    type Dual = Var<S<N>>; // TODO bound on N?
}

unsafe impl <T: HasDual> HasDual for Rec<T> {
    type Dual = Rec<T::Dual>;
}
impl<E> Chan<E, Eps> {
/// Close a channel. Should always be used at the end of your program.
pub fn close(self) {
// Intentionally empty: consuming `self` drops the underlying
// Sender/Receiver, which hangs up the connection.
}
}
impl<E, T, A: marker::Send + 'static> Chan<E, Send<A, T>> {
/// Send a value of type `A` over the channel. Returns a channel with
/// protocol `T`
pub fn send(self, v: A) -> Chan<E, T> {
unsafe_write_chan(&self, v);
unsafe { transmute(self) }
}
}
impl<E, T, A: marker::Send + 'static> Chan<E, Recv<A, T>> {
/// Receives a value of type `A` from the channel. Returns a tuple
/// containing the resulting channel and the received value.
pub fn recv(self) -> (Chan<E, T>, A) {
let v = unsafe_read_chan(&self);
(unsafe { transmute(self) }, v)
}
}
impl<E, R, S> Chan<E, Choose<R, S>> {
/// Perform an active choice, selecting protocol `R`.
pub fn sel1(self) -> Chan<E, R> {
unsafe_write_chan(&self, true);
unsafe { transmute(self) }
}
/// Perform an active choice, selecting protocol `S`.
pub fn sel2(self) -> Chan<E, S> {
unsafe_write_chan(&self, false);
unsafe { transmute(self) }
}
}
/// Convenience function. This is identical to `.sel2()`
impl<Z, A, B> Chan<Z, Choose<A, B>> {
pub fn skip(self) -> Chan<Z, B> {
self.sel2()
}
}
/// Convenience function. This is identical to `.sel2().sel2()`
impl<Z, A, B, C> Chan<Z, Choose<A, Choose<B, C>>> {
pub fn skip2(self) -> Chan<Z, C> {
self.sel2().sel2()
}
}
/// Convenience function. This is identical to `.sel2().sel2().sel2()`
impl<Z, A, B, C, D> Chan<Z, Choose<A, Choose<B, Choose<C, D>>>> {
pub fn skip3(self) -> Chan<Z, D> {
self.sel2().sel2().sel2()
}
}
/// Convenience function. This is identical to `.sel2().sel2().sel2().sel2()`
impl<Z, A, B, C, D, E> Chan<Z, Choose<A, Choose<B, Choose<C, Choose<D, E>>>>> {
pub fn skip4(self) -> Chan<Z, E> {
self.sel2().sel2().sel2().sel2()
}
}
/// Convenience function. This is identical to `.sel2().sel2().sel2().sel2().sel2()`
impl<Z, A, B, C, D, E, F> Chan<Z, Choose<A, Choose<B, Choose<C, Choose<D, Choose<E, F>>>>>> {
pub fn skip5(self) -> Chan<Z, F> {
self.sel2().sel2().sel2().sel2().sel2()
}
}
/// Convenience function.
impl<Z, A, B, C, D, E, F, G> Chan<Z, Choose<A, Choose<B, Choose<C, Choose<D, Choose<E, Choose<F, G>>>>>>> {
pub fn skip6(self) -> Chan<Z, G> {
self.sel2().sel2().sel2().sel2().sel2().sel2()
}
}
/// Convenience function.
impl<Z, A, B, C, D, E, F, G, H> Chan<Z, Choose<A, Choose<B, Choose<C, Choose<D, Choose<E, Choose<F, Choose<G, H>>>>>>>> {
pub fn skip7(self) -> Chan<Z, H> {
self.sel2().sel2().sel2().sel2().sel2().sel2().sel2()
}
}
impl<E, R, S> Chan<E, Offer<R, S>> {
/// Passive choice. This allows the other end of the channel to select one
/// of two options for continuing the protocol: either `R` or `S`.
pub fn offer(self) -> Result<Chan<E, R>, Chan<E, S>> {
let b = unsafe_read_chan(&self);
if b {
Ok(unsafe { transmute(self) })
} else {
Err(unsafe { transmute(self) })
}
}
}
impl<E, R> Chan<E, Rec<R>> {
/// Enter a recursive environment, putting the current environment on the
/// top of the environment stack.
pub fn enter(self) -> Chan<(R, E), R> {
unsafe { transmute(self) }
}
}
impl<E, R> Chan<(R, E), Var<Z>> {
/// Recurse to the environment on the top of the environment stack.
pub fn zero(self) -> Chan<(R, E), R> {
unsafe { transmute(self) }
}
}
impl<E, R, V> Chan<(R, E), Var<S<V>>> {
/// Pop the top environment from the environment stack.
pub fn succ(self) -> Chan<E, Var<V>> {
unsafe { transmute(self) }
}
}
/// Homogeneous select. We have a vector of channels, all obeying the same
/// protocol (and in the exact same point of the protocol), wait for one of them
/// to receive. Removes the receiving channel from the vector and returns both
/// the channel and the new vector.
pub fn hselect<E, P, A>(mut chans: Vec<Chan<E, Recv<A, P>>>)
-> (Chan<E, Recv<A, P>>, Vec<Chan<E, Recv<A, P>>>)
{
let i = iselect(&chans);
let c = chans.remove(i);
(c, chans)
}
/// An alternative version of homogeneous select, returning the index of the Chan
/// that is ready to receive.
///
/// Accepts any slice of channels; `&Vec<_>` arguments continue to work
/// through deref coercion.
///
/// # Panics
///
/// Panics if the selected handle id is not found in the index map
/// (an internal invariant violation).
pub fn iselect<E, P, A>(chans: &[Chan<E, Recv<A, P>>]) -> usize {
    // Maps opaque selection-handle ids back to indexes in `chans`.
    let mut map = HashMap::new();
    let id = {
        let sel = Select::new();
        // Collect all the handles; they borrow the receivers, so they
        // must not outlive this block.
        let mut handles = Vec::with_capacity(chans.len());
        for (i, chan) in chans.iter().enumerate() {
            let &Chan(_, ref rx, _) = chan;
            let handle = sel.handle(rx);
            map.insert(handle.id(), i);
            handles.push(handle);
        }
        // Register every handle with the select set.
        for handle in handles.iter_mut() {
            unsafe { handle.add(); }
        }
        // Block until one of the receivers is ready.
        let id = sel.wait();
        // Deregister the handles again before they are dropped.
        for handle in handles.iter_mut() {
            unsafe { handle.remove(); }
        }
        id
    };
    map.remove(&id).unwrap()
}
/// Heterogeneous selection structure for channels
///
/// This builds a structure of channels that we wish to select over. This is
/// structured in a way such that the channels selected over cannot be
/// interacted with (consumed) as long as the borrowing ChanSelect object
/// exists. This is necessary to ensure memory safety and should not pose any
/// problems in practice.
///
/// The type parameter T is a return type, ie we store a value of some type T
/// that is returned in case its associated channels is selected on `wait()`
pub struct ChanSelect<'c, T> {
chans: Vec<(&'c Chan<(), ()>, T)>,
}
impl<'c, T> ChanSelect<'c, T> {
/// Creates an empty selection set.
pub fn new() -> ChanSelect<'c, T> {
ChanSelect {
chans: Vec::new()
}
}
/// Add a channel whose next step is `Recv`
///
/// Once a channel has been added it cannot be interacted with as long as it
/// is borrowed here (by virtue of move semantics).
pub fn add_recv_ret<E, R, A: marker::Send>(&mut self,
chan: &'c Chan<E, Recv<A, R>>,
ret: T)
{
// NOTE(review): the reference is transmuted to the type-erased
// `&Chan<(), ()>` stored in `chans`; only the Receiver inside is
// used afterwards (in `wait`).
self.chans.push((unsafe { transmute(chan) }, ret));
}
/// Add a channel whose next step is `Offer`; `ret` is returned from
/// `wait()` if this channel is the one selected.
pub fn add_offer_ret<E, R, S>(&mut self,
chan: &'c Chan<E, Offer<R, S>>,
ret: T)
{
self.chans.push((unsafe { transmute(chan) }, ret));
}
/// Find a Receiver (and hence a Chan) that is ready to receive.
///
/// This method consumes the ChanSelect, freeing up the borrowed Receivers
/// to be consumed.
pub fn wait(self) -> T {
let sel = Select::new();
let mut handles = Vec::with_capacity(self.chans.len());
// Maps opaque selection-handle ids back to the user-supplied values.
let mut map = HashMap::new();
for (chan, ret) in self.chans.into_iter() {
let &Chan(_, ref rx, _) = chan;
let h = sel.handle(rx);
let id = h.id();
map.insert(id, ret);
handles.push(h);
}
// Register all handles, block until one receiver is ready, then
// deregister again before the handles are dropped.
for handle in handles.iter_mut() {
unsafe { handle.add(); }
}
let id = sel.wait();
for handle in handles.iter_mut() {
unsafe { handle.remove(); }
}
map.remove(&id).unwrap()
}
/// How many channels are there in the structure?
pub fn len(&self) -> usize {
self.chans.len()
}
}
impl<'c> ChanSelect<'c, usize> {
pub fn add_recv<E, R, A: marker::Send>(&mut self,
c: &'c Chan<E, Recv<A, R>>)
{
let index = self.chans.len();
self.add_recv_ret(c, index);
}
pub fn add_offer<E, R, S>(&mut self,
c: &'c Chan<E, Offer<R, S>>)
{
let index = self.chans.len();
self.add_offer_ret(c, index);
}
}
/// Sets up an session typed communication channel. Should be paired with
/// `request` for the corresponding client.
pub fn accept<E, R>(tx: Sender<Chan<E, R>>) -> Option<Chan<E, R>> {
borrow_accept(&tx)
}
pub fn borrow_accept<E, R>(tx: &Sender<Chan<E, R>>) -> Option<Chan<E, R>> {
let (tx1, rx1) = channel();
let (tx2, rx2) = channel();
let c1 = Chan(tx1, rx2, PhantomData);
let c2 = Chan(tx2, rx1, PhantomData);
match tx.send(c1) {
Ok(_) => Some(c2),
_ => None
}
}
/// Sets up an session typed communication channel. Should be paired with
/// `accept` for the corresponding server.
pub fn request<E, R: HasDual>(rx: Receiver<Chan<E, R>>) -> Option<Chan<E, R::Dual>> {
borrow_request(&rx)
}
/// Receives a channel endpoint from `rx` and reinterprets its protocol as
/// the dual `R::Dual`, i.e. the client's view of the session.
pub fn borrow_request<E, R: HasDual>(rx: &Receiver<Chan<E, R>>) -> Option<Chan<E, R::Dual>> {
match rx.recv() {
// The Chan is destructured and rebuilt (instead of transmuted)
// to change its protocol parameter from R to R::Dual.
// TODO Change to a normal transmute once
// https://github.com/rust-lang/rust/issues/24459
// has been addressed.
Ok(Chan(tx, rx, _)) => Some(Chan(tx, rx, PhantomData)),
_ => None
}
}
/// Returns two session channels
pub fn session_channel<R: HasDual>() -> (Chan<(), R>, Chan<(), R::Dual>) {
    // Two one-directional mpsc channels are crossed over to form one
    // bidirectional session channel pair: each endpoint sends on the
    // channel the other endpoint receives from.
    let (tx_a, rx_a) = channel();
    let (tx_b, rx_b) = channel();
    (Chan(tx_a, rx_b, PhantomData), Chan(tx_b, rx_a, PhantomData))
}
/// Connect two functions using a session typed channel.
pub fn connect<E, F1, F2, R>(srv: F1, cli: F2)
where F1: Fn(Chan<E, R>) + marker::Send,
F2: Fn(Chan<E, R::Dual>) + marker::Send,
E: marker::Send + 'static,
R: HasDual + marker::Send + 'static
{
let (tx, rx) = channel();
// The server runs in a scoped thread (so it may borrow from this stack
// frame); the client runs on the calling thread.
let jg = scoped(move|| srv(accept(tx).unwrap()));
cli(request(rx).unwrap());
// Wait for the server to finish before returning.
jg.join();
}
/// This macro is convenient for server-like protocols of the form:
///
/// `Offer<A, Offer<B, Offer<C, ... >>>`
///
/// # Examples
///
/// Assume we have a protocol `Offer<Recv<u64, Eps>, Offer<Recv<String, Eps>,Eps>>>`
/// we can use the `offer!` macro as follows:
///
/// ```rust
/// #[macro_use] extern crate rust_sessions;
/// use rust_sessions::*;
/// use std::thread::spawn;
///
/// fn srv(c: Chan<(), Offer<Recv<u64, Eps>, Offer<Recv<String, Eps>, Eps>>>) {
/// offer! { c,
/// Number => {
/// let (c, n) = c.recv();
/// assert_eq!(42, n);
/// c.close();
/// },
/// String => {
/// c.recv().0.close();
/// },
/// Quit => {
/// c.close();
/// }
/// }
/// }
///
/// fn cli(c: Chan<(), Choose<Send<u64, Eps>, Choose<Send<String, Eps>, Eps>>>) {
/// c.sel1().send(42).close();
/// }
///
/// fn main() {
/// let (s, c) = session_channel();
/// spawn(move|| cli(c));
/// srv(s);
/// }
/// ```
///
/// The identifiers on the left-hand side of the arrows have no semantic
/// meaning, they only provide a meaningful name for the reader.
#[macro_export]
macro_rules! offer {
(
$id:ident, $branch:ident => $code:expr, $($t:tt)+
) => (
match $id.offer() {
Ok($id) => $code,
Err($id) => offer!{ $id, $($t)+ }
}
);
(
$id:ident, $branch:ident => $code:expr
) => (
$code
)
}
/// This macro plays the same role as the `select!` macro does for `Receiver`s.
///
/// It also supports a second form with `Offer`s (see the example below).
///
/// # Examples
///
/// ```rust
/// #[macro_use] extern crate rust_sessions;
/// use rust_sessions::*;
/// use std::thread::spawn;
///
/// fn send_str(c: Chan<(), Send<String, Eps>>) {
/// c.send("Hello, World!".to_string()).close();
/// }
///
/// fn send_usize(c: Chan<(), Send<usize, Eps>>) {
/// c.send(42).close();
/// }
///
/// fn main() {
/// let (tcs, rcs) = session_channel();
/// let (tcu, rcu) = session_channel();
///
/// // Spawn threads
/// spawn(move|| send_str(tcs));
/// spawn(move|| send_usize(tcu));
///
/// loop {
/// chan_select! {
/// (c, s) = rcs.recv() => {
/// assert_eq!("Hello, World!".to_string(), s);
/// c.close();
/// break
/// },
/// (c, i) = rcu.recv() => {
/// assert_eq!(42, i);
/// c.close();
/// break
/// }
/// }
/// }
/// }
/// ```
///
/// ```rust
/// #[macro_use]
/// extern crate rust_sessions;
/// extern crate rand;
///
/// use std::thread::spawn;
/// use rust_sessions::*;
///
/// type Igo = Choose<Send<String, Eps>, Send<u64, Eps>>;
/// type Ugo = Offer<Recv<String, Eps>, Recv<u64, Eps>>;
///
/// fn srv(chan_one: Chan<(), Ugo>, chan_two: Chan<(), Ugo>) {
/// let _ign;
/// chan_select! {
/// _ign = chan_one.offer() => {
/// String => {
/// let (c, s) = chan_one.recv();
/// assert_eq!("Hello, World!".to_string(), s);
/// c.close();
/// },
/// Number => {
/// unreachable!()
/// }
/// },
/// _ign = chan_two.offer() => {
/// String => {
/// unreachable!()
/// },
/// Number => {
/// unreachable!()
/// }
/// }
/// }
/// }
///
/// fn cli(c: Chan<(), Igo>) {
/// c.sel1().send("Hello, World!".to_string()).close();
/// }
///
/// fn main() {
/// let (ca1, ca2) = session_channel();
/// let (cb1, cb2) = session_channel();
///
/// spawn(move|| cli(ca2));
///
/// srv(ca1, cb1);
/// }
/// ```
#[macro_export]
macro_rules! chan_select {
(
$(($c:ident, $name:pat) = $rx:ident.recv() => $code:expr),+
) => ({
let index = {
let mut sel = $crate::ChanSelect::new();
$( sel.add_recv(&$rx); )+
sel.wait()
};
let mut i = 0;
$( if index == { i += 1; i - 1 } { let ($c, $name) = $rx.recv(); $code } else )+
{ unreachable!() }
});
(
$($res:ident = $rx:ident.offer() => { $($t:tt)+ }),+
) => ({
let index = {
let mut sel = $crate::ChanSelect::new();
$( sel.add_offer(&$rx); )+
sel.wait()
};
let mut i = 0;
$( if index == { i += 1; i - 1 } { $res = offer!{ $rx, $($t)+ } } else )+
{ unreachable!() }
})
}
// TODO: Add a comment explaining why Chan is destructured in borrow_request()
//! rust-sessions
//!
//! This is an implementation of *session types* in Rust.
//!
//! The channels in Rusts standard library are useful for a great many things,
//! but they're restricted to a single type. Session types allows one to use a
//! single channel for transferring values of different types, depending on the
//! context in which it is used. Specifically, a session typed channel always
//! carry a *protocol*, which dictates how communication is to take place.
//!
//! For example, imagine that two threads, `A` and `B` want to communicate with
//! the following pattern:
//!
//! 1. `A` sends an integer to `B`.
//! 2. `B` sends a boolean to `A` depending on the integer received.
//!
//! With session types, this could be done by sharing a single channel. From
//! `A`'s point of view, it would have the type `int ! (bool ? eps)` where `t ! r`
//! is the protocol "send something of type `t` then proceed with
//! protocol `r`", the protocol `t ? r` is "receive something of type `t` then proceed
//! with protocol `r`, and `eps` is a special marker indicating the end of a
//! communication session.
//!
//! Our session type library allows the user to create channels that adhere to a
//! specified protocol. For example, a channel like the above would have the type
//! `Chan<(), Send<i64, Recv<bool, Eps>>>`, and the full program could look like this:
//!
//! ```
//! extern crate rust_sessions;
//! use rust_sessions::*;
//!
//! type Server = Recv<i64, Send<bool, Eps>>;
//! type Client = Send<i64, Recv<bool, Eps>>;
//!
//! fn srv(c: Chan<(), Server>) {
//! let (c, n) = c.recv();
//! if n % 2 == 0 {
//! c.send(true).close()
//! } else {
//! c.send(false).close()
//! }
//! }
//!
//! fn cli(c: Chan<(), Client>) {
//! let n = 42;
//! let c = c.send(n);
//! let (c, b) = c.recv();
//!
//! if b {
//! println!("{} is even", n);
//! } else {
//! println!("{} is odd", n);
//! }
//!
//! c.close();
//! }
//!
//! fn main() {
//! connect(srv, cli);
//! }
//! ```
#![feature(std_misc)]
#![feature(scoped)]
use std::marker;
use std::thread::scoped;
use std::mem::transmute;
use std::sync::mpsc::{Sender, Receiver, channel};
use std::collections::HashMap;
use std::sync::mpsc::Select;
use std::marker::PhantomData;
/// A session typed channel. `T` is the protocol and `E` is the environment,
/// containing potential recursion targets
pub struct Chan<E, T> (Sender<Box<u8>>, Receiver<Box<u8>>, PhantomData<(E, T)>);
fn unsafe_write_chan<A: marker::Send + 'static, E, T>
(&Chan(ref tx, _, _): &Chan<E, T>, x: A)
{
let tx: &Sender<Box<A>> = unsafe { transmute(tx) };
tx.send(Box::new(x)).unwrap();
}
fn unsafe_read_chan<A: marker::Send + 'static, E, T>
(&Chan(_, ref rx, _): &Chan<E, T>) -> A
{
let rx: &Receiver<Box<A>> = unsafe { transmute(rx) };
*rx.recv().unwrap()
}
/// Peano numbers: Zero
#[allow(missing_copy_implementations)]
pub struct Z;
/// Peano numbers: Plus one
pub struct S<P> ( PhantomData<P> );
/// End of communication session (epsilon)
#[allow(missing_copy_implementations)]
pub struct Eps;
/// Receive `A`, then `R`
pub struct Recv<A,R> ( PhantomData<(A, R)> );
/// Send `A`, then `R`
pub struct Send<A,R> ( PhantomData<(A, R)> );
/// Active choice between `R` and `S`
pub struct Choose<R,S> ( PhantomData<(R, S)> );
/// Passive choice (offer) between `R` and `S`
pub struct Offer<R,S> ( PhantomData<(R, S)> );
/// Enter a recursive environment
pub struct Rec<R> ( PhantomData<R> );
/// Recurse. V indicates how many layers of the recursive environment we recurse
/// out of.
pub struct Var<V> ( PhantomData<V> );
pub unsafe trait HasDual {
type Dual;
}
unsafe impl HasDual for Eps {
type Dual = Eps;
}
unsafe impl <A, T: HasDual> HasDual for Send<A, T> {
type Dual = Recv<A, T::Dual>;
}
unsafe impl <A, T: HasDual> HasDual for Recv<A, T> {
type Dual = Send<A, T::Dual>;
}
unsafe impl <R: HasDual, S: HasDual> HasDual for Choose<R, S> {
type Dual = Offer<R::Dual, S::Dual>;
}
unsafe impl <R: HasDual, S: HasDual> HasDual for Offer<R, S> {
type Dual = Choose<R::Dual, S::Dual>;
}
unsafe impl HasDual for Var<Z> {
type Dual = Var<Z>;
}
unsafe impl <N> HasDual for Var<S<N>> {
type Dual = Var<S<N>>; // TODO bound on N?
}
unsafe impl <T: HasDual> HasDual for Rec<T> {
type Dual = Rec<T::Dual>;
}
impl<E> Chan<E, Eps> {
/// Close a channel. Should always be used at the end of your program.
pub fn close(self) {
// Consume `c`
}
}
impl<E, T, A: marker::Send + 'static> Chan<E, Send<A, T>> {
/// Send a value of type `A` over the channel. Returns a channel with
/// protocol `T`
pub fn send(self, v: A) -> Chan<E, T> {
unsafe_write_chan(&self, v);
unsafe { transmute(self) }
}
}
impl<E, T, A: marker::Send + 'static> Chan<E, Recv<A, T>> {
/// Receives a value of type `A` from the channel. Returns a tuple
/// containing the resulting channel and the received value.
pub fn recv(self) -> (Chan<E, T>, A) {
let v = unsafe_read_chan(&self);
(unsafe { transmute(self) }, v)
}
}
impl<E, R, S> Chan<E, Choose<R, S>> {
/// Perform an active choice, selecting protocol `R`.
pub fn sel1(self) -> Chan<E, R> {
unsafe_write_chan(&self, true);
unsafe { transmute(self) }
}
/// Perform an active choice, selecting protocol `S`.
pub fn sel2(self) -> Chan<E, S> {
unsafe_write_chan(&self, false);
unsafe { transmute(self) }
}
}
/// Convenience function. This is identical to `.sel2()`
impl<Z, A, B> Chan<Z, Choose<A, B>> {
pub fn skip(self) -> Chan<Z, B> {
self.sel2()
}
}
/// Convenience function. This is identical to `.sel2().sel2()`
impl<Z, A, B, C> Chan<Z, Choose<A, Choose<B, C>>> {
pub fn skip2(self) -> Chan<Z, C> {
self.sel2().sel2()
}
}
/// Convenience function. This is identical to `.sel2().sel2().sel2()`
impl<Z, A, B, C, D> Chan<Z, Choose<A, Choose<B, Choose<C, D>>>> {
pub fn skip3(self) -> Chan<Z, D> {
self.sel2().sel2().sel2()
}
}
/// Convenience function. This is identical to `.sel2().sel2().sel2().sel2()`
impl<Z, A, B, C, D, E> Chan<Z, Choose<A, Choose<B, Choose<C, Choose<D, E>>>>> {
pub fn skip4(self) -> Chan<Z, E> {
self.sel2().sel2().sel2().sel2()
}
}
/// Convenience function. This is identical to `.sel2().sel2().sel2().sel2().sel2()`
impl<Z, A, B, C, D, E, F> Chan<Z, Choose<A, Choose<B, Choose<C, Choose<D, Choose<E, F>>>>>> {
pub fn skip5(self) -> Chan<Z, F> {
self.sel2().sel2().sel2().sel2().sel2()
}
}
/// Convenience function.
impl<Z, A, B, C, D, E, F, G> Chan<Z, Choose<A, Choose<B, Choose<C, Choose<D, Choose<E, Choose<F, G>>>>>>> {
pub fn skip6(self) -> Chan<Z, G> {
self.sel2().sel2().sel2().sel2().sel2().sel2()
}
}
/// Convenience function.
impl<Z, A, B, C, D, E, F, G, H> Chan<Z, Choose<A, Choose<B, Choose<C, Choose<D, Choose<E, Choose<F, Choose<G, H>>>>>>>> {
pub fn skip7(self) -> Chan<Z, H> {
self.sel2().sel2().sel2().sel2().sel2().sel2().sel2()
}
}
impl<E, R, S> Chan<E, Offer<R, S>> {
/// Passive choice. This allows the other end of the channel to select one
/// of two options for continuing the protocol: either `R` or `S`.
pub fn offer(self) -> Result<Chan<E, R>, Chan<E, S>> {
let b = unsafe_read_chan(&self);
if b {
Ok(unsafe { transmute(self) })
} else {
Err(unsafe { transmute(self) })
}
}
}
impl<E, R> Chan<E, Rec<R>> {
/// Enter a recursive environment, putting the current environment on the
/// top of the environment stack.
pub fn enter(self) -> Chan<(R, E), R> {
unsafe { transmute(self) }
}
}
impl<E, R> Chan<(R, E), Var<Z>> {
/// Recurse to the environment on the top of the environment stack.
pub fn zero(self) -> Chan<(R, E), R> {
unsafe { transmute(self) }
}
}
impl<E, R, V> Chan<(R, E), Var<S<V>>> {
/// Pop the top environment from the environment stack.
pub fn succ(self) -> Chan<E, Var<V>> {
unsafe { transmute(self) }
}
}
/// Homogeneous select. We have a vector of channels, all obeying the same
/// protocol (and in the exact same point of the protocol), wait for one of them
/// to receive. Removes the receiving channel from the vector and returns both
/// the channel and the new vector.
pub fn hselect<E, P, A>(mut chans: Vec<Chan<E, Recv<A, P>>>)
-> (Chan<E, Recv<A, P>>, Vec<Chan<E, Recv<A, P>>>)
{
let i = iselect(&chans);
let c = chans.remove(i);
(c, chans)
}
/// An alternative version of homogeneous select, returning the index of the Chan
/// that is ready to receive.
///
/// Accepts any slice of channels; `&Vec<_>` arguments continue to work
/// through deref coercion.
///
/// # Panics
///
/// Panics if the selected handle id is not found in the index map
/// (an internal invariant violation).
pub fn iselect<E, P, A>(chans: &[Chan<E, Recv<A, P>>]) -> usize {
    // Maps opaque selection-handle ids back to indexes in `chans`.
    let mut map = HashMap::new();
    let id = {
        let sel = Select::new();
        // Collect all the handles; they borrow the receivers, so they
        // must not outlive this block.
        let mut handles = Vec::with_capacity(chans.len());
        for (i, chan) in chans.iter().enumerate() {
            let &Chan(_, ref rx, _) = chan;
            let handle = sel.handle(rx);
            map.insert(handle.id(), i);
            handles.push(handle);
        }
        // Register every handle with the select set.
        for handle in handles.iter_mut() {
            unsafe { handle.add(); }
        }
        // Block until one of the receivers is ready.
        let id = sel.wait();
        // Deregister the handles again before they are dropped.
        for handle in handles.iter_mut() {
            unsafe { handle.remove(); }
        }
        id
    };
    map.remove(&id).unwrap()
}
/// Heterogeneous selection structure for channels
///
/// This builds a structure of channels that we wish to select over. This is
/// structured in a way such that the channels selected over cannot be
/// interacted with (consumed) as long as the borrowing ChanSelect object
/// exists. This is necessary to ensure memory safety and should not pose any
/// problems in practice.
///
/// The type parameter T is a return type, ie we store a value of some type T
/// that is returned in case its associated channels is selected on `wait()`
pub struct ChanSelect<'c, T> {
chans: Vec<(&'c Chan<(), ()>, T)>,
}
impl<'c, T> ChanSelect<'c, T> {
pub fn new() -> ChanSelect<'c, T> {
ChanSelect {
chans: Vec::new()
}
}
/// Add a channel whose next step is `Recv`
///
/// Once a channel has been added it cannot be interacted with as long as it
/// is borrowed here (by virtue of move semantics).
pub fn add_recv_ret<E, R, A: marker::Send>(&mut self,
chan: &'c Chan<E, Recv<A, R>>,
ret: T)
{
self.chans.push((unsafe { transmute(chan) }, ret));
}
pub fn add_offer_ret<E, R, S>(&mut self,
chan: &'c Chan<E, Offer<R, S>>,
ret: T)
{
self.chans.push((unsafe { transmute(chan) }, ret));
}
/// Find a Receiver (and hence a Chan) that is ready to receive.
///
/// This method consumes the ChanSelect, freeing up the borrowed Receivers
/// to be consumed.
pub fn wait(self) -> T {
let sel = Select::new();
let mut handles = Vec::with_capacity(self.chans.len());
let mut map = HashMap::new();
for (chan, ret) in self.chans.into_iter() {
let &Chan(_, ref rx, _) = chan;
let h = sel.handle(rx);
let id = h.id();
map.insert(id, ret);
handles.push(h);
}
for handle in handles.iter_mut() {
unsafe { handle.add(); }
}
let id = sel.wait();
for handle in handles.iter_mut() {
unsafe { handle.remove(); }
}
map.remove(&id).unwrap()
}
/// How many channels are there in the structure?
pub fn len(&self) -> usize {
self.chans.len()
}
}
impl<'c> ChanSelect<'c, usize> {
pub fn add_recv<E, R, A: marker::Send>(&mut self,
c: &'c Chan<E, Recv<A, R>>)
{
let index = self.chans.len();
self.add_recv_ret(c, index);
}
pub fn add_offer<E, R, S>(&mut self,
c: &'c Chan<E, Offer<R, S>>)
{
let index = self.chans.len();
self.add_offer_ret(c, index);
}
}
/// Sets up an session typed communication channel. Should be paired with
/// `request` for the corresponding client.
pub fn accept<E, R>(tx: Sender<Chan<E, R>>) -> Option<Chan<E, R>> {
borrow_accept(&tx)
}
pub fn borrow_accept<E, R>(tx: &Sender<Chan<E, R>>) -> Option<Chan<E, R>> {
let (tx1, rx1) = channel();
let (tx2, rx2) = channel();
let c1 = Chan(tx1, rx2, PhantomData);
let c2 = Chan(tx2, rx1, PhantomData);
match tx.send(c1) {
Ok(_) => Some(c2),
_ => None
}
}
/// Sets up an session typed communication channel. Should be paired with
/// `accept` for the corresponding server.
pub fn request<E, R: HasDual>(rx: Receiver<Chan<E, R>>) -> Option<Chan<E, R::Dual>> {
borrow_request(&rx)
}
pub fn borrow_request<E, R: HasDual>(rx: &Receiver<Chan<E, R>>) -> Option<Chan<E, R::Dual>> {
match rx.recv() {
// TODO Change to a normal transmute once
// https://github.com/rust-lang/rust/issues/24459
// has been addressed.
Ok(Chan(tx, rx, _)) => Some(Chan(tx, rx, PhantomData)),
_ => None
}
}
/// Returns two session channels
pub fn session_channel<R: HasDual>() -> (Chan<(), R>, Chan<(), R::Dual>) {
let (tx1, rx1) = channel();
let (tx2, rx2) = channel();
let c1 = Chan(tx1, rx2, PhantomData);
let c2 = Chan(tx2, rx1, PhantomData);
(c1, c2)
}
/// Connect two functions using a session typed channel.
pub fn connect<E, F1, F2, R>(srv: F1, cli: F2)
where F1: Fn(Chan<E, R>) + marker::Send,
F2: Fn(Chan<E, R::Dual>) + marker::Send,
E: marker::Send + 'static,
R: HasDual + marker::Send + 'static
{
let (tx, rx) = channel();
let jg = scoped(move|| srv(accept(tx).unwrap()));
cli(request(rx).unwrap());
jg.join();
}
/// This macro is convenient for server-like protocols of the form:
///
/// `Offer<A, Offer<B, Offer<C, ... >>>`
///
/// # Examples
///
/// Assume we have a protocol `Offer<Recv<u64, Eps>, Offer<Recv<String, Eps>,Eps>>>`
/// we can use the `offer!` macro as follows:
///
/// ```rust
/// #[macro_use] extern crate rust_sessions;
/// use rust_sessions::*;
/// use std::thread::spawn;
///
/// fn srv(c: Chan<(), Offer<Recv<u64, Eps>, Offer<Recv<String, Eps>, Eps>>>) {
/// offer! { c,
/// Number => {
/// let (c, n) = c.recv();
/// assert_eq!(42, n);
/// c.close();
/// },
/// String => {
/// c.recv().0.close();
/// },
/// Quit => {
/// c.close();
/// }
/// }
/// }
///
/// fn cli(c: Chan<(), Choose<Send<u64, Eps>, Choose<Send<String, Eps>, Eps>>>) {
/// c.sel1().send(42).close();
/// }
///
/// fn main() {
/// let (s, c) = session_channel();
/// spawn(move|| cli(c));
/// srv(s);
/// }
/// ```
///
/// The identifiers on the left-hand side of the arrows have no semantic
/// meaning, they only provide a meaningful name for the reader.
#[macro_export]
macro_rules! offer {
(
$id:ident, $branch:ident => $code:expr, $($t:tt)+
) => (
match $id.offer() {
Ok($id) => $code,
Err($id) => offer!{ $id, $($t)+ }
}
);
(
$id:ident, $branch:ident => $code:expr
) => (
$code
)
}
/// This macro plays the same role as the `select!` macro does for `Receiver`s.
///
/// It also supports a second form with `Offer`s (see the example below).
///
/// # Examples
///
/// ```rust
/// #[macro_use] extern crate rust_sessions;
/// use rust_sessions::*;
/// use std::thread::spawn;
///
/// fn send_str(c: Chan<(), Send<String, Eps>>) {
/// c.send("Hello, World!".to_string()).close();
/// }
///
/// fn send_usize(c: Chan<(), Send<usize, Eps>>) {
/// c.send(42).close();
/// }
///
/// fn main() {
/// let (tcs, rcs) = session_channel();
/// let (tcu, rcu) = session_channel();
///
/// // Spawn threads
/// spawn(move|| send_str(tcs));
/// spawn(move|| send_usize(tcu));
///
/// loop {
/// chan_select! {
/// (c, s) = rcs.recv() => {
/// assert_eq!("Hello, World!".to_string(), s);
/// c.close();
/// break
/// },
/// (c, i) = rcu.recv() => {
/// assert_eq!(42, i);
/// c.close();
/// break
/// }
/// }
/// }
/// }
/// ```
///
/// ```rust
/// #[macro_use]
/// extern crate rust_sessions;
/// extern crate rand;
///
/// use std::thread::spawn;
/// use rust_sessions::*;
///
/// type Igo = Choose<Send<String, Eps>, Send<u64, Eps>>;
/// type Ugo = Offer<Recv<String, Eps>, Recv<u64, Eps>>;
///
/// fn srv(chan_one: Chan<(), Ugo>, chan_two: Chan<(), Ugo>) {
/// let _ign;
/// chan_select! {
/// _ign = chan_one.offer() => {
/// String => {
/// let (c, s) = chan_one.recv();
/// assert_eq!("Hello, World!".to_string(), s);
/// c.close();
/// },
/// Number => {
/// unreachable!()
/// }
/// },
/// _ign = chan_two.offer() => {
/// String => {
/// unreachable!()
/// },
/// Number => {
/// unreachable!()
/// }
/// }
/// }
/// }
///
/// fn cli(c: Chan<(), Igo>) {
/// c.sel1().send("Hello, World!".to_string()).close();
/// }
///
/// fn main() {
/// let (ca1, ca2) = session_channel();
/// let (cb1, cb2) = session_channel();
///
/// spawn(move|| cli(ca2));
///
/// srv(ca1, cb1);
/// }
/// ```
#[macro_export]
macro_rules! chan_select {
(
$(($c:ident, $name:pat) = $rx:ident.recv() => $code:expr),+
) => ({
let index = {
let mut sel = $crate::ChanSelect::new();
$( sel.add_recv(&$rx); )+
sel.wait()
};
let mut i = 0;
$( if index == { i += 1; i - 1 } { let ($c, $name) = $rx.recv(); $code } else )+
{ unreachable!() }
});
(
$($res:ident = $rx:ident.offer() => { $($t:tt)+ }),+
) => ({
let index = {
let mut sel = $crate::ChanSelect::new();
$( sel.add_offer(&$rx); )+
sel.wait()
};
let mut i = 0;
$( if index == { i += 1; i - 1 } { $res = offer!{ $rx, $($t)+ } } else )+
{ unreachable!() }
})
}
|
// Copyright 2015 Corey Farwell
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate xml;
use std::ascii::AsciiExt;
use std::io;
use xml::{Element, ElementBuilder, Parser, Xml};
trait ElementUtils {
fn tag_with_text(&mut self, child_name: &'static str, child_body: &str);
fn tag_with_optional_text(&mut self, child_name: &'static str, child_body: &Option<String>);
}
impl ElementUtils for Element {
    // Appends a child element named `child_name` whose text content is
    // `child_body`.
    fn tag_with_text(&mut self, child_name: &'static str, child_body: &str) {
        let child = elem_with_text(child_name, child_body);
        self.tag(child);
    }

    // Like `tag_with_text`, except that a `None` body appends nothing.
    fn tag_with_optional_text(&mut self, child_name: &'static str, child_body: &Option<String>) {
        match *child_body {
            Some(ref text) => self.tag_with_text(child_name, text),
            None => {}
        }
    }
}
// Builds a new XML element named `tag_name` containing `chars` as its only
// text content (no namespace, no attributes).
fn elem_with_text(tag_name: &'static str, chars: &str) -> Element {
let mut elem = Element::new(tag_name.to_string(), None, vec![]);
elem.text(chars.to_string());
elem
}
trait ViaXml {
fn to_xml(&self) -> Element;
fn from_xml(element: Element) -> Result<Self, &'static str>;
}
/// RSS
///
/// "At the top level, a RSS document is a \<rss\> element, with a mandatory attribute called
/// version, that specifies the version of RSS that the document conforms to. If it conforms to
/// this specification, the version attribute must be 2.0."
///
/// [RSS 2.0 Specification § RSS]
/// (http://cyber.law.harvard.edu/rss/rss.html#whatIsRss)
#[derive(Default)]
pub struct Rss(pub Channel);
impl ViaXml for Rss {
    /// Serializes the feed as an `<rss version="2.0">` element wrapping the
    /// channel element.
    fn to_xml(&self) -> Element {
        let mut rss = Element::new("rss".to_string(), None, vec![("version".to_string(), None, "2.0".to_string())]);
        let &Rss(ref channel) = self;
        rss.tag(channel.to_xml());
        rss
    }

    /// Parses an `<rss>` root element into an `Rss` value.
    ///
    /// Returns `Err` if the root element is not `<rss>` (case-insensitive)
    /// or does not contain a `<channel>` child.
    fn from_xml(rss_elem: Element) -> Result<Self, &'static str> {
        // Return an error instead of panicking, matching the <channel>
        // check below and the Result return type — parse failures on
        // external input are recoverable conditions, not bugs.
        if rss_elem.name.to_ascii_lowercase() != "rss" {
            return Err("Expected <rss> element at document root");
        }
        let channel_elem = match rss_elem.get_child("channel", None) {
            Some(elem) => elem,
            None => return Err("No <channel> element found in <rss>"),
        };
        let channel = try!(ViaXml::from_xml(channel_elem.clone()));
        Ok(Rss(channel))
    }
}
impl Rss {
/// Serializes the feed to an RSS 2.0 XML string, prefixed with an XML
/// declaration processing instruction.
pub fn to_string(&self) -> String {
let mut ret = format!("{}", Xml::PINode("xml version='1.0' encoding='UTF-8'".to_string()));
ret.push_str(&format!("{}", self.to_xml()));
ret
}
/// Reads an entire RSS document from `reader` and parses it.
///
/// Returns `Err` if the reader fails or no well-formed root element can
/// be built from the input.
pub fn from_reader(reader: &mut io::Read) -> Result<Self, &'static str> {
let mut rss_string = String::new();
if let Err(..) = reader.read_to_string(&mut rss_string) {
return Err("Error reading string from reader");
}
let mut parser = Parser::new();
parser.feed_str(&rss_string);
let mut builder = ElementBuilder::new();
// The first complete element the builder produces is taken as the
// document root.
for event in parser {
if let Some(Ok(element)) = builder.handle_event(event) {
return ViaXml::from_xml(element);
}
}
Err("RSS read error")
}
}
/// Channel
///
/// "Subordinate to the \<rss\> element is a single \<channel\> element, which contains information
/// about the channel (metadata) and its contents."
///
/// [RSS 2.0 Specification § Channel]
/// (http://cyber.law.harvard.edu/rss/rss.html#requiredChannelElements)
///
/// ## Examples
///
/// ```
/// use rss::Channel;
/// use std::default::Default;
///
/// let channel = Channel {
/// title: "My Blog".to_string(),
/// link: "http://myblog.com".to_string(),
/// description: "Where I write stuff".to_string(),
/// items: vec![],
/// ..Default::default()
/// };
/// ```
#[derive(Default)]
pub struct Channel {
/// Required: the name of the channel.
pub title: String,
/// Required: URL of the web site corresponding to the channel.
pub link: String,
/// Required: phrase or sentence describing the channel.
pub description: String,
/// The channel's entries; serialized as `<item>` children.
pub items: Vec<Item>,
// The remaining optional fields map to like-named RSS channel sub-elements
// and are omitted from the output when `None`.
pub language: Option<String>,
pub copyright: Option<String>,
pub managing_editor: Option<String>,
pub web_master: Option<String>,
pub pub_date: Option<String>,
pub last_build_date: Option<String>,
pub categories: Vec<Category>,
pub generator: Option<String>,
pub docs: Option<String>,
// pub cloud:
pub ttl: Option<String>, // TODO: change this to Option<i32>?
pub image: Option<String>,
pub rating: Option<String>,
// pub text_input:
pub skip_hours: Option<String>,
pub skip_days: Option<String>,
}
impl ViaXml for Channel {
    /// Serializes this channel as a `<channel>` element: required elements
    /// first, then items, then optional metadata (omitted when `None`), then
    /// categories.
    fn to_xml(&self) -> Element {
        let mut channel = Element::new("channel".to_string(), None, vec![]);
        channel.tag_with_text("title", &self.title);
        channel.tag_with_text("link", &self.link);
        channel.tag_with_text("description", &self.description);
        for item in &self.items {
            channel.tag(item.to_xml());
        }
        channel.tag_with_optional_text("language", &self.language);
        channel.tag_with_optional_text("copyright", &self.copyright);
        channel.tag_with_optional_text("managingEditor", &self.managing_editor);
        channel.tag_with_optional_text("webMaster", &self.web_master);
        channel.tag_with_optional_text("pubDate", &self.pub_date);
        channel.tag_with_optional_text("lastBuildDate", &self.last_build_date);
        channel.tag_with_optional_text("generator", &self.generator);
        channel.tag_with_optional_text("docs", &self.docs);
        channel.tag_with_optional_text("ttl", &self.ttl);
        channel.tag_with_optional_text("image", &self.image);
        channel.tag_with_optional_text("rating", &self.rating);
        channel.tag_with_optional_text("skipHours", &self.skip_hours);
        channel.tag_with_optional_text("skipDays", &self.skip_days);
        for category in &self.categories {
            channel.tag(category.to_xml());
        }
        channel
    }

    /// Parses a `<channel>` element; `Err` names the missing required element.
    ///
    /// Bug fix: the optional sub-elements are now looked up under the same
    /// camelCase names that `to_xml` writes (the RSS 2.0 element names) instead
    /// of snake_case names, and `web_master` reads `<webMaster>` rather than a
    /// copy-pasted second lookup of `<managingEditor>`. Previously these fields
    /// were silently dropped on round-trip.
    fn from_xml(element: Element) -> Result<Self, &'static str> {
        let title = match element.get_child("title", None) {
            Some(element) => element.content_str(),
            None => return Err("<channel> is missing required <title> element"),
        };
        let link = match element.get_child("link", None) {
            Some(element) => element.content_str(),
            None => return Err("<channel> is missing required <link> element"),
        };
        let description = match element.get_child("description", None) {
            Some(element) => element.content_str(),
            None => return Err("<channel> is missing required <description> element"),
        };
        let items = element.get_children("item", None)
            .map(|e| ViaXml::from_xml(e.clone()).unwrap())
            .collect();
        let language = element.get_child("language", None).map(Element::content_str);
        let copyright = element.get_child("copyright", None).map(Element::content_str);
        let managing_editor = element.get_child("managingEditor", None).map(Element::content_str);
        let web_master = element.get_child("webMaster", None).map(Element::content_str);
        let pub_date = element.get_child("pubDate", None).map(Element::content_str);
        let last_build_date = element.get_child("lastBuildDate", None).map(Element::content_str);
        let categories = element.get_children("category", None)
            .map(|e| ViaXml::from_xml(e.clone()).unwrap())
            .collect();
        let generator = element.get_child("generator", None).map(Element::content_str);
        let docs = element.get_child("docs", None).map(Element::content_str);
        let ttl = element.get_child("ttl", None).map(Element::content_str);
        let image = element.get_child("image", None).map(Element::content_str);
        let rating = element.get_child("rating", None).map(Element::content_str);
        let skip_hours = element.get_child("skipHours", None).map(Element::content_str);
        let skip_days = element.get_child("skipDays", None).map(Element::content_str);
        Ok(Channel {
            title: title,
            link: link,
            description: description,
            items: items,
            language: language,
            copyright: copyright,
            managing_editor: managing_editor,
            web_master: web_master,
            pub_date: pub_date,
            last_build_date: last_build_date,
            categories: categories,
            generator: generator,
            docs: docs,
            ttl: ttl,
            image: image,
            rating: rating,
            skip_hours: skip_hours,
            skip_days: skip_days,
        })
    }
}
/// Item
///
/// [RSS 2.0 Specification § Item]
/// (http://cyber.law.harvard.edu/rss/rss.html#hrelementsOfLtitemgt)
///
/// Every field in this representation is optional (or an empty `Vec`).
#[derive(Default)]
pub struct Item {
pub title: Option<String>,
pub link: Option<String>,
pub description: Option<String>,
pub author: Option<String>,
pub categories: Vec<Category>,
pub comments: Option<String>,
// pub enclosure
// pub guid
// pub pubDate
// pub source
}
impl ViaXml for Item {
    /// Serializes this item as an `<item>` element; `None` fields are omitted.
    fn to_xml(&self) -> Element {
        let mut elem = Element::new("item".to_string(), None, vec![]);
        elem.tag_with_optional_text("title", &self.title);
        elem.tag_with_optional_text("link", &self.link);
        elem.tag_with_optional_text("description", &self.description);
        elem.tag_with_optional_text("author", &self.author);
        elem.tag_with_optional_text("comments", &self.comments);
        for category in &self.categories {
            elem.tag(category.to_xml());
        }
        elem
    }

    /// Builds an `Item` from an `<item>` element; every field is optional.
    fn from_xml(element: Element) -> Result<Self, &'static str> {
        // Shared lookup for the optional text children.
        let text_of = |name| element.get_child(name, None).map(Element::content_str);
        let categories = element.get_children("category", None)
            .map(|e| ViaXml::from_xml(e.clone()).unwrap())
            .collect();
        Ok(Item {
            title: text_of("title"),
            link: text_of("link"),
            description: text_of("description"),
            author: text_of("author"),
            comments: text_of("comments"),
            categories: categories,
        })
    }
}
/// Category
///
/// [RSS 2.0 Specification § Category]
/// (http://cyber.law.harvard.edu/rss/rss.html#ltcategorygtSubelementOfLtitemgt)
#[derive(Default)]
pub struct Category {
/// Optional `domain` attribute of the `<category>` element.
pub domain: Option<String>,
/// Text content of the `<category>` element.
pub value: String,
}
impl ViaXml for Category {
    /// Serializes as `<category domain="...">value</category>`; the `domain`
    /// attribute is emitted only when present.
    fn to_xml(&self) -> Element {
        let mut attributes = vec![];
        if let Some(ref d) = self.domain {
            attributes.push(("domain".to_string(), None, d.clone()));
        }
        let mut category = Element::new("category".to_string(), None, attributes);
        category.text(self.value.clone());
        category
    }

    /// Parses a `<category>` element; a missing `domain` attribute becomes `None`.
    fn from_xml(elem: Element) -> Result<Self, &'static str> {
        Ok(Category {
            domain: elem.get_attribute("domain", None).map(|s| s.to_string()),
            value: elem.content_str(),
        })
    }
}
#[cfg(test)]
mod test {
    use std::default::Default;
    use std::fs::File;

    use super::{Rss, Item, Channel};

    #[test]
    fn test_basic_to_string() {
        let item = Item {
            title: Some("My first post!".to_string()),
            link: Some("http://myblog.com/post1".to_string()),
            description: Some("This is my first post".to_string()),
            ..Default::default()
        };
        let channel = Channel {
            title: "My Blog".to_string(),
            link: "http://myblog.com".to_string(),
            description: "Where I write stuff".to_string(),
            items: vec![item],
            ..Default::default()
        };
        let rss = Rss(channel);
        assert_eq!(rss.to_string(), "<?xml version=\'1.0\' encoding=\'UTF-8\'?><rss version=\'2.0\'><channel><title>My Blog</title><link>http://myblog.com</link><description>Where I write stuff</description><item><title>My first post!</title><link>http://myblog.com/post1</link><description>This is my first post</description></item></channel></rss>");
    }

    #[test]
    fn test_from_file() {
        let mut file = File::open("test-data/pinboard.xml").unwrap();
        let rss = Rss::from_reader(&mut file).unwrap();
        assert!(rss.to_string().len() > 0);
    }

    #[test]
    fn test_from_read_no_channels() {
        let rss_str = "<rss></rss>";
        assert!(Rss::from_reader(&mut rss_str.as_bytes()).is_err());
    }

    #[test]
    fn test_from_read_one_channel_no_properties() {
        let rss_str = "<rss><channel></channel></rss>";
        assert!(Rss::from_reader(&mut rss_str.as_bytes()).is_err());
    }

    #[test]
    fn test_read_one_channel() {
        let rss_str = "\
            <rss>\
            <channel>\
            <title>Hello world!</title>\
            <description></description>\
            <link></link>\
            </channel>\
            </rss>";
        let Rss(channel) = Rss::from_reader(&mut rss_str.as_bytes()).unwrap();
        assert_eq!("Hello world!", channel.title);
    }

    // Ensure the reader skips the leading XML processing instruction and still
    // parses the RSS document that follows it.
    #[test]
    fn test_read_with_pinode() {
        let rss_str = "\
            <?xml version=\'1.0\' encoding=\'UTF-8\'?>\
            <rss>\
            <channel>\
            <title>Title</title>\
            <link></link>\
            <description></description>\
            </channel>\
            </rss>";
        let Rss(channel) = Rss::from_reader(&mut rss_str.as_bytes()).unwrap();
        assert_eq!("Title", channel.title);
    }
}
Clean up tests a little bit
// Copyright 2015 Corey Farwell
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate xml;
use std::ascii::AsciiExt;
use std::io;
use xml::{Element, ElementBuilder, Parser, Xml};
/// Convenience helpers for appending simple text-only children to an `Element`.
trait ElementUtils {
/// Appends a child element named `child_name` containing the text `child_body`.
fn tag_with_text(&mut self, child_name: &'static str, child_body: &str);
/// Like `tag_with_text`, but a no-op when `child_body` is `None`.
fn tag_with_optional_text(&mut self, child_name: &'static str, child_body: &Option<String>);
}
impl ElementUtils for Element {
    /// Appends a child element named `child_name` containing the text `child_body`.
    fn tag_with_text(&mut self, child_name: &'static str, child_body: &str) {
        let child = elem_with_text(child_name, child_body);
        self.tag(child);
    }

    /// Like `tag_with_text`, but a no-op when `child_body` is `None`.
    fn tag_with_optional_text(&mut self, child_name: &'static str, child_body: &Option<String>) {
        match *child_body {
            Some(ref body) => self.tag_with_text(child_name, body),
            None => {}
        }
    }
}
/// Creates an element named `tag_name` whose only content is the text `chars`.
fn elem_with_text(tag_name: &'static str, chars: &str) -> Element {
    let mut element = Element::new(tag_name.to_string(), None, vec![]);
    element.text(chars.to_string());
    element
}
/// Conversion between a domain type and its XML `Element` representation.
trait ViaXml {
/// Serializes `self` into an XML element.
fn to_xml(&self) -> Element;
/// Parses `element` into this type; `Err` carries a static description of the failure.
fn from_xml(element: Element) -> Result<Self, &'static str>;
}
/// RSS
///
/// "At the top level, a RSS document is a \<rss\> element, with a mandatory attribute called
/// version, that specifies the version of RSS that the document conforms to. If it conforms to
/// this specification, the version attribute must be 2.0."
///
/// [RSS 2.0 Specification § RSS]
/// (http://cyber.law.harvard.edu/rss/rss.html#whatIsRss)
///
/// Newtype wrapper around the document's single required `Channel`.
#[derive(Default)]
pub struct Rss(pub Channel);
impl ViaXml for Rss {
    /// Serializes the document as `<rss version="2.0">` wrapping the channel.
    fn to_xml(&self) -> Element {
        let mut rss = Element::new("rss".to_string(), None, vec![("version".to_string(), None, "2.0".to_string())]);
        let &Rss(ref channel) = self;
        rss.tag(channel.to_xml());
        rss
    }

    /// Parses an `<rss>` element (matched case-insensitively) into an `Rss`.
    ///
    /// Bug fix: a non-`<rss>` root previously `panic!`ed even though this
    /// function returns `Result`; it now returns `Err` so callers can recover.
    fn from_xml(rss_elem: Element) -> Result<Self, &'static str> {
        if rss_elem.name.to_ascii_lowercase() != "rss" {
            return Err("Expected an <rss> element");
        }
        let channel_elem = match rss_elem.get_child("channel", None) {
            Some(elem) => elem,
            None => return Err("No <channel> element found in <rss>"),
        };
        let channel = try!(ViaXml::from_xml(channel_elem.clone()));
        Ok(Rss(channel))
    }
}
impl Rss {
/// Renders the full document: an XML declaration PI followed by the serialized `<rss>` tree.
pub fn to_string(&self) -> String {
let mut ret = format!("{}", Xml::PINode("xml version='1.0' encoding='UTF-8'".to_string()));
ret.push_str(&format!("{}", self.to_xml()));
ret
}
/// Reads an entire RSS document from `reader` and parses it.
///
/// Returns `Err` when reading fails, when no complete element is produced by
/// the parser, or when the document is not a valid `<rss>` tree.
pub fn from_reader(reader: &mut io::Read) -> Result<Self, &'static str> {
let mut rss_string = String::new();
if let Err(..) = reader.read_to_string(&mut rss_string) {
return Err("Error reading string from reader");
}
let mut parser = Parser::new();
parser.feed_str(&rss_string);
let mut builder = ElementBuilder::new();
// The builder yields `Some(Ok(..))` once a complete top-level element has been
// assembled; the first such element is parsed as the RSS document.
for event in parser {
if let Some(Ok(element)) = builder.handle_event(event) {
return ViaXml::from_xml(element);
}
}
Err("RSS read error")
}
}
/// Channel
///
/// "Subordinate to the \<rss\> element is a single \<channel\> element, which contains information
/// about the channel (metadata) and its contents."
///
/// [RSS 2.0 Specification § Channel]
/// (http://cyber.law.harvard.edu/rss/rss.html#requiredChannelElements)
///
/// ## Examples
///
/// ```
/// use rss::Channel;
/// use std::default::Default;
///
/// let channel = Channel {
/// title: "My Blog".to_string(),
/// link: "http://myblog.com".to_string(),
/// description: "Where I write stuff".to_string(),
/// items: vec![],
/// ..Default::default()
/// };
/// ```
#[derive(Default)]
pub struct Channel {
/// Required: the name of the channel.
pub title: String,
/// Required: URL of the web site corresponding to the channel.
pub link: String,
/// Required: phrase or sentence describing the channel.
pub description: String,
/// The channel's entries; serialized as `<item>` children.
pub items: Vec<Item>,
// The remaining optional fields map to like-named RSS channel sub-elements
// and are omitted from the output when `None`.
pub language: Option<String>,
pub copyright: Option<String>,
pub managing_editor: Option<String>,
pub web_master: Option<String>,
pub pub_date: Option<String>,
pub last_build_date: Option<String>,
pub categories: Vec<Category>,
pub generator: Option<String>,
pub docs: Option<String>,
// pub cloud:
pub ttl: Option<String>, // TODO: change this to Option<i32>?
pub image: Option<String>,
pub rating: Option<String>,
// pub text_input:
pub skip_hours: Option<String>,
pub skip_days: Option<String>,
}
impl ViaXml for Channel {
    /// Serializes this channel as a `<channel>` element: required elements
    /// first, then items, then optional metadata (omitted when `None`), then
    /// categories.
    fn to_xml(&self) -> Element {
        let mut channel = Element::new("channel".to_string(), None, vec![]);
        channel.tag_with_text("title", &self.title);
        channel.tag_with_text("link", &self.link);
        channel.tag_with_text("description", &self.description);
        for item in &self.items {
            channel.tag(item.to_xml());
        }
        channel.tag_with_optional_text("language", &self.language);
        channel.tag_with_optional_text("copyright", &self.copyright);
        channel.tag_with_optional_text("managingEditor", &self.managing_editor);
        channel.tag_with_optional_text("webMaster", &self.web_master);
        channel.tag_with_optional_text("pubDate", &self.pub_date);
        channel.tag_with_optional_text("lastBuildDate", &self.last_build_date);
        channel.tag_with_optional_text("generator", &self.generator);
        channel.tag_with_optional_text("docs", &self.docs);
        channel.tag_with_optional_text("ttl", &self.ttl);
        channel.tag_with_optional_text("image", &self.image);
        channel.tag_with_optional_text("rating", &self.rating);
        channel.tag_with_optional_text("skipHours", &self.skip_hours);
        channel.tag_with_optional_text("skipDays", &self.skip_days);
        for category in &self.categories {
            channel.tag(category.to_xml());
        }
        channel
    }

    /// Parses a `<channel>` element; `Err` names the missing required element.
    ///
    /// Bug fix: the optional sub-elements are now looked up under the same
    /// camelCase names that `to_xml` writes (the RSS 2.0 element names) instead
    /// of snake_case names, and `web_master` reads `<webMaster>` rather than a
    /// copy-pasted second lookup of `<managingEditor>`. Previously these fields
    /// were silently dropped on round-trip.
    fn from_xml(element: Element) -> Result<Self, &'static str> {
        let title = match element.get_child("title", None) {
            Some(element) => element.content_str(),
            None => return Err("<channel> is missing required <title> element"),
        };
        let link = match element.get_child("link", None) {
            Some(element) => element.content_str(),
            None => return Err("<channel> is missing required <link> element"),
        };
        let description = match element.get_child("description", None) {
            Some(element) => element.content_str(),
            None => return Err("<channel> is missing required <description> element"),
        };
        let items = element.get_children("item", None)
            .map(|e| ViaXml::from_xml(e.clone()).unwrap())
            .collect();
        let language = element.get_child("language", None).map(Element::content_str);
        let copyright = element.get_child("copyright", None).map(Element::content_str);
        let managing_editor = element.get_child("managingEditor", None).map(Element::content_str);
        let web_master = element.get_child("webMaster", None).map(Element::content_str);
        let pub_date = element.get_child("pubDate", None).map(Element::content_str);
        let last_build_date = element.get_child("lastBuildDate", None).map(Element::content_str);
        let categories = element.get_children("category", None)
            .map(|e| ViaXml::from_xml(e.clone()).unwrap())
            .collect();
        let generator = element.get_child("generator", None).map(Element::content_str);
        let docs = element.get_child("docs", None).map(Element::content_str);
        let ttl = element.get_child("ttl", None).map(Element::content_str);
        let image = element.get_child("image", None).map(Element::content_str);
        let rating = element.get_child("rating", None).map(Element::content_str);
        let skip_hours = element.get_child("skipHours", None).map(Element::content_str);
        let skip_days = element.get_child("skipDays", None).map(Element::content_str);
        Ok(Channel {
            title: title,
            link: link,
            description: description,
            items: items,
            language: language,
            copyright: copyright,
            managing_editor: managing_editor,
            web_master: web_master,
            pub_date: pub_date,
            last_build_date: last_build_date,
            categories: categories,
            generator: generator,
            docs: docs,
            ttl: ttl,
            image: image,
            rating: rating,
            skip_hours: skip_hours,
            skip_days: skip_days,
        })
    }
}
/// Item
///
/// [RSS 2.0 Specification § Item]
/// (http://cyber.law.harvard.edu/rss/rss.html#hrelementsOfLtitemgt)
///
/// Every field in this representation is optional (or an empty `Vec`).
#[derive(Default)]
pub struct Item {
pub title: Option<String>,
pub link: Option<String>,
pub description: Option<String>,
pub author: Option<String>,
pub categories: Vec<Category>,
pub comments: Option<String>,
// pub enclosure
// pub guid
// pub pubDate
// pub source
}
impl ViaXml for Item {
/// Serializes this item as an `<item>` element; `None` fields are omitted.
fn to_xml(&self) -> Element {
let mut item = Element::new("item".to_string(), None, vec![]);
item.tag_with_optional_text("title", &self.title);
item.tag_with_optional_text("link", &self.link);
item.tag_with_optional_text("description", &self.description);
item.tag_with_optional_text("author", &self.author);
item.tag_with_optional_text("comments", &self.comments);
for category in &self.categories {
item.tag(category.to_xml());
}
item
}
/// Builds an `Item` from an `<item>` element; every field is optional.
///
/// The `unwrap` on category parsing cannot panic at present:
/// `Category::from_xml` always returns `Ok`.
fn from_xml(element: Element) -> Result<Self, &'static str> {
let title = element.get_child("title", None).map(Element::content_str);
let link = element.get_child("link", None).map(Element::content_str);
let description = element.get_child("description", None).map(Element::content_str);
let author = element.get_child("author", None).map(Element::content_str);
let comments = element.get_child("comments", None).map(Element::content_str);
let categories = element.get_children("category", None)
.map(|e| ViaXml::from_xml(e.clone()).unwrap())
.collect();
Ok(Item {
title: title,
link: link,
description: description,
categories: categories,
author: author,
comments: comments,
})
}
}
/// Category
///
/// [RSS 2.0 Specification § Category]
/// (http://cyber.law.harvard.edu/rss/rss.html#ltcategorygtSubelementOfLtitemgt)
#[derive(Default)]
pub struct Category {
/// Optional `domain` attribute of the `<category>` element.
pub domain: Option<String>,
/// Text content of the `<category>` element.
pub value: String,
}
impl ViaXml for Category {
/// Serializes as `<category domain="...">value</category>`; the `domain`
/// attribute is emitted only when present.
fn to_xml(&self) -> Element {
let mut category = match self.domain {
Some(ref d) => Element::new("category".to_string(), None, vec![("domain".to_string(), None, d.clone())]),
None => Element::new("category".to_string(), None, vec![]),
};
category.text(self.value.clone());
category
}
/// Parses a `<category>` element; never fails — a missing `domain`
/// attribute simply becomes `None`.
fn from_xml(elem: Element) -> Result<Self, &'static str> {
let domain = elem.get_attribute("domain", None).map(|s| s.to_string());
let value = elem.content_str();
Ok(Category {
domain: domain,
value: value,
})
}
}
#[cfg(test)]
mod test {
    use std::default::Default;
    use std::fs::File;

    use super::{Rss, Item, Channel};

    #[test]
    fn test_basic_to_string() {
        let item = Item {
            title: Some("My first post!".to_string()),
            link: Some("http://myblog.com/post1".to_string()),
            description: Some("This is my first post".to_string()),
            ..Default::default()
        };
        let channel = Channel {
            title: "My Blog".to_string(),
            link: "http://myblog.com".to_string(),
            description: "Where I write stuff".to_string(),
            items: vec![item],
            ..Default::default()
        };
        let rss = Rss(channel);
        assert_eq!(rss.to_string(), "<?xml version=\'1.0\' encoding=\'UTF-8\'?><rss version=\'2.0\'><channel><title>My Blog</title><link>http://myblog.com</link><description>Where I write stuff</description><item><title>My first post!</title><link>http://myblog.com/post1</link><description>This is my first post</description></item></channel></rss>");
    }

    #[test]
    fn test_from_file() {
        let mut file = File::open("test-data/pinboard.xml").unwrap();
        let rss = Rss::from_reader(&mut file).unwrap();
        assert!(rss.to_string().len() > 0);
    }

    #[test]
    fn test_from_read_no_channels() {
        let rss_str = "<rss></rss>";
        assert!(Rss::from_reader(&mut rss_str.as_bytes()).is_err());
    }

    #[test]
    fn test_from_read_one_channel_no_properties() {
        let rss_str = "\
            <rss>\
            <channel>\
            </channel>\
            </rss>";
        // Bug fix: this previously referenced the undefined name `rss_bytes`
        // (a leftover from the pre-cleanup naming), which does not compile;
        // the local variable is `rss_str`.
        assert!(Rss::from_reader(&mut rss_str.as_bytes()).is_err());
    }

    #[test]
    fn test_read_one_channel() {
        let rss_str = "\
            <rss>\
            <channel>\
            <title>Hello world!</title>\
            <description></description>\
            <link></link>\
            </channel>\
            </rss>";
        let Rss(channel) = Rss::from_reader(&mut rss_str.as_bytes()).unwrap();
        assert_eq!("Hello world!", channel.title);
    }

    // Ensure reader ignores the PI XML node and continues to parse the RSS
    #[test]
    fn test_read_with_pinode() {
        let rss_str = "\
            <?xml version=\'1.0\' encoding=\'UTF-8\'?>\
            <rss>\
            <channel>\
            <title>Title</title>\
            <link></link>\
            <description></description>\
            </channel>\
            </rss>";
        let Rss(channel) = Rss::from_reader(&mut rss_str.as_bytes()).unwrap();
        assert_eq!("Title", channel.title);
    }
}
|
use std::collections::{HashMap, BinaryHeap};
use std::hash::{Hash, Hasher,SipHasher};
use std::any::Any;
use std::borrow::Borrow;
use std::rc::Rc;
// 128-bit identifier; equality and ordering derive from the two raw 64-bit words.
#[derive (Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct SiphashID {
data: [u64; 2],
}
// Produces `SiphashID`s by feeding the same bytes to two SipHash instances.
// NOTE(review): name is not CamelCase; rustc will warn (`non_camel_case_types`).
pub struct SiphashID_Generator {
data: [SipHasher; 2],
}
impl Hasher for SiphashID_Generator {
// Deliberately unusable: results are taken as 128-bit ids via `generate`, not `finish`.
fn finish (& self)->u64 {panic!()}//Hack: this is actually for generating SiphashID, not 64-bit
// Mirror every input into both halves so `generate` can read two 64-bit results.
fn write (&mut self, bytes: & [u8]) {self.data [0].write (bytes);self.data [1].write (bytes);}
//TODO: should we implement the others for efficiency?
}
impl SiphashID_Generator {
// Snapshot both hashers into one 128-bit id; `&self` means the generator keeps its state.
fn generate (& self)->SiphashID {SiphashID {data: [self.data [0].finish (), self.data [1].finish ()]}}
}
// A totally ordered timeline type with explicit minimum and maximum sentinel values.
pub trait BaseTime: Ord {
fn min_time ()->Self;
fn max_time ()->Self;
}
impl BaseTime for i64 {
fn min_time ()->i64 {i64::min_value ()}
fn max_time ()->i64 {i64::max_value ()}
}
// Marker trait tying a field kind to its runtime data representation.
pub trait Field {
type Data: Any;// = Self;
// Distinguishes this field kind from others; presumably must be globally unique — TODO confirm.
fn unique_identifier ()->u64;
}
// Key identifying one field of one entity.
#[derive (Clone, PartialEq, Eq, Hash)]
struct FieldID {
entity: SiphashID,
field: u64,
}
// Totally ordered event time: base time, then iteration counter, then id as the
// final tiebreaker (derived lexicographic `Ord` over fields in declaration order).
#[derive (Clone, PartialEq, Eq, PartialOrd, Ord)]
struct GenericExtendedTime <Base: BaseTime> {
base: Base,
iteration: u64,
id: SiphashID,
}
// Per-simulation configuration: the timeline type and a constants bundle.
pub trait Basics {
type Time: BaseTime;
type Constants;
}
// NOTE(review): bounds on type-alias parameters (`B: Basics`) are not accepted by
// stable rustc; this is work-in-progress code.
type ExtendedTime <B: Basics> = GenericExtendedTime <B::Time>;
//Note: in the future, we expect we might use a custom hash table type that knows it can rely on SiphashID to already be random, so we don't need to hash it again. This also applies to FieldID, although there may be some complications in that case.
impl Hash for SiphashID {
// Hashing only the first word suffices: equal values necessarily have equal
// `data [0]`, so the `Eq`/`Hash` contract is preserved.
fn hash <H: Hasher> (& self, state: &mut H) {self.data [0].hash (state);}
}
impl <Base: BaseTime> Hash for GenericExtendedTime <Base>{
// Equal extended times have equal `id`s (derived `Eq` compares all fields),
// so hashing the id alone remains consistent with equality.
fn hash <H: Hasher> (& self, state: &mut H) {self.id.hash (state);}
}
// Read access to per-entity field data.
pub trait Accessor {
fn get <Type: Field> (&mut self, id: SiphashID)-> Option <& Type::Data>;
}
// Mutation context handed to events: field read/write plus deterministic
// randomness and the current time/constants.
pub trait Mutator <B: Basics>: Accessor {
fn get_mutable <Type: Field> (&mut self, id: SiphashID)->Option <&mut Type::Data>;
// Presumably `None` deletes the field and `Some` inserts/replaces it — TODO confirm.
fn set <Type: Field> (&mut self, id: SiphashID, data: Option <Type::Data>);
fn random_bits (&mut self, bits:u32)->u64;
fn random_id (&mut self)->SiphashID;
fn now (& self)->B::Time;
fn constants (& self)->& B::Constants;
}
// Access context for predictors; no additional methods yet.
pub trait PredictorAccessor <B: Basics>: Accessor {
}
// Core interface of a time-steward simulation backend.
pub trait TimeSteward <'a, B: Basics> {
type A: Accessor;
type M: Mutator <B>;
type P: PredictorAccessor <B>;
type Event;
type Prediction;
type Predictor;
// The bool return presumably reports whether the event was inserted/erased — TODO document.
fn insert_fiat_event (&mut self, time: B::Time, distinguisher: u64, event: Self::Event)->bool;
fn erase_fiat_event (&mut self, time: B::Time, distinguisher: u64)->bool;
fn accessor_after (&mut self, time: B::Time)->Self::A;
}
// NOTE(review): "flat" presumably means a single current-state snapshot valid
// only within the returned interval — confirm intended semantics.
pub trait FlatTimeSteward <'a, B: Basics>:TimeSteward <'a, B> {
fn valid_strictly_between (& self)->(B::Time, B::Time);
}
// An event is a shared closure that mutates simulation state through a mutator.
type GenericEvent <B: Basics, M: Mutator <B>> =Rc <Fn (&mut M)>;
// A predictor's verdict: no event, an event to run immediately, or one scheduled at a time.
enum GenericPrediction <B: Basics, Event> {
Nothing,
Immediately (Event),
At (B::Time, Event),
}
// WIP: the lifetime parameter `'a` is unused by any field (rustc E0392), and the
// commented-out maps reference `Self::Event`, which is not valid in a struct
// definition — still being worked out.
pub struct StandardFlatTimeSteward <'a, B: Basics> {
entity_states: HashMap< FieldID, Box <Any>>,
last_change: B::Time,
//fiat_events: HashMap< ExtendedTime <B>, Self::Event>,
//predictions: HashMap<ExtendedTime <B>, Prediction<B>>,
upcoming_event_times: BinaryHeap <ExtendedTime <B>>,
}
// Read-only view over a steward.
pub struct StandardFlatTimeStewardAccessor <'a, B: Basics + 'a> {
steward: &'a StandardFlatTimeSteward <'a, B>,
}
// Mutable view over a steward, handed to events.
pub struct StandardFlatTimeStewardMutator <'a, B: Basics + 'a> {
steward: & 'a mut StandardFlatTimeSteward <'a, B>,
}
// Mutable view over a steward, handed to predictors.
pub struct StandardFlatTimeStewardPredictorAccessor <'a, B: Basics + 'a> {
steward: & 'a mut StandardFlatTimeSteward <'a, B>,
}
impl <'a, B: Basics>StandardFlatTimeSteward <'a, B> {
// WIP stub: the body is empty (does not compile), and `Self::Predictor` is a trait
// associated type that cannot be named from this inherent impl.
fn new (constants: B::Constants, predictors: Vec<Self::Predictor>)->Self {}
}
impl <'a, B: Basics> Accessor for StandardFlatTimeStewardAccessor <'a, B> {
// Looks up the boxed value stored under (entity id, field id) and downcasts it to
// the field's concrete data type. NOTE(review): `downcast` consumes a `Box`, but
// `HashMap::get` only yields a shared reference — this likely needs
// `downcast_ref`; as written it should not borrow-check. TODO confirm fix.
fn get <Type: Field> (&mut self, id: SiphashID)-> Option <& Type::Data> {
self.steward.entity_states.get (& FieldID {entity: id, field: Type::unique_identifier ()}).map (| something | something.downcast ::<Type::Data> ().unwrap ().borrow ())
}
}
impl <'a, B: Basics> Accessor for StandardFlatTimeStewardMutator <'a, B> {
// Same lookup as the read-only accessor; see the note there.
fn get <Type: Field> (&mut self, id: SiphashID)-> Option <& Type::Data> {
self.steward.entity_states.get (& FieldID {entity: id, field: Type::unique_identifier ()}).map (| something | something.downcast ::<Type::Data> ().unwrap ().borrow ())
}
}
impl <'a, B: Basics> TimeSteward <'a, B> for StandardFlatTimeSteward <'a, B> {
type A =StandardFlatTimeStewardAccessor <'a, B>;
type M =StandardFlatTimeStewardMutator <'a, B>;
type P =StandardFlatTimeStewardPredictorAccessor <'a, B>;
type Event = GenericEvent <B, Self::M>;
type Prediction = GenericPrediction <B, Self::Event>;
type Predictor =Rc <for <'b, 'c> Fn (& 'b mut StandardFlatTimeStewardPredictorAccessor <'c, B>)->Self::Prediction>;
// WIP stubs: the `-> bool` / `-> Self::A` bodies are empty and do not compile yet.
fn insert_fiat_event (&mut self, time: B::Time, distinguisher: u64, event: Self::Event)->bool {}
fn erase_fiat_event (&mut self, time: B::Time, distinguisher: u64)->bool {}
fn accessor_after (&mut self, time: B::Time)->Self::A {}
}
#[test]
// Placeholder test: no assertions yet; passing only means the crate compiled.
fn it_works() {
}
More implementing
use std::hash::{Hash, Hasher,SipHasher};
use std::any::Any;
use std::rc::Rc;
// 128-bit identifier; equality and ordering derive from the two raw 64-bit words.
#[derive (Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct SiphashID {
data: [u64; 2],
}
// Produces `SiphashID`s by feeding the same bytes to two SipHash instances.
// NOTE(review): name is not CamelCase; rustc will warn (`non_camel_case_types`).
pub struct SiphashID_Generator {
data: [SipHasher; 2],
}
impl Hasher for SiphashID_Generator {
// Deliberately unusable: results are taken as 128-bit ids via `generate`, not `finish`.
fn finish (& self)->u64 {panic!()}//Hack: this is actually for generating SiphashID, not 64-bit
// Mirror every input into both halves so `generate` can read two 64-bit results.
fn write (&mut self, bytes: & [u8]) {self.data [0].write (bytes);self.data [1].write (bytes);}
//TODO: should we implement the others for efficiency?
}
impl SiphashID_Generator {
// Snapshot both hashers into one 128-bit id; `&self` means the generator keeps its state.
fn generate (& self)->SiphashID {SiphashID {data: [self.data [0].finish (), self.data [1].finish ()]}}
}
// A totally ordered timeline type with explicit minimum and maximum sentinel values.
pub trait BaseTime: Ord {
fn min_time ()->Self;
fn max_time ()->Self;
}
impl BaseTime for i64 {
fn min_time ()->i64 {i64::min_value ()}
fn max_time ()->i64 {i64::max_value ()}
}
// Marker trait tying a field kind to its runtime data representation.
pub trait Field {
type Data: Any;// = Self;
fn unique_identifier ()->u64;
}
// Key identifying one field of one entity.
#[derive (Clone, PartialEq, Eq, Hash)]
struct FieldID {
entity: SiphashID,
field: u64,
}
// Totally ordered event time: base time, then iteration counter, then id tiebreaker.
#[derive (Clone, PartialEq, Eq, PartialOrd, Ord)]
struct GenericExtendedTime <Base: BaseTime> {
base: Base,
iteration: u64,
id: SiphashID,
}
// Per-simulation configuration: the timeline type and a constants bundle.
pub trait Basics {
type Time: BaseTime;
type Constants;
}
// NOTE(review): bounds on type-alias parameters are not accepted by stable rustc; WIP.
type ExtendedTime <B: Basics> = GenericExtendedTime <B::Time>;
//Note: in the future, we expect we might use a custom hash table type that knows it can rely on SiphashID to already be random, so we don't need to hash it again. This also applies to FieldID, although there may be some complications in that case.
impl Hash for SiphashID {
// Equal values have equal `data [0]`, so hashing one word keeps the Eq/Hash contract.
fn hash <H: Hasher> (& self, state: &mut H) {self.data [0].hash (state);}
}
impl <Base: BaseTime> Hash for GenericExtendedTime <Base>{
// Equal extended times have equal ids, so hashing the id alone stays consistent.
fn hash <H: Hasher> (& self, state: &mut H) {self.id.hash (state);}
}
// Read access to per-entity field data.
pub trait Accessor {
fn get <Type: Field> (&mut self, id: SiphashID)-> Option <& Type::Data>;
}
// Mutation context handed to events: field read/write plus deterministic
// randomness and the current time/constants.
pub trait Mutator <B: Basics>: Accessor {
fn get_mutable <Type: Field> (&mut self, id: SiphashID)->Option <&mut Type::Data>;
// Presumably `None` deletes the field and `Some` inserts/replaces it — TODO confirm.
fn set <Type: Field> (&mut self, id: SiphashID, data: Option <Type::Data>);
fn random_bits (&mut self, bits:u32)->u64;
fn random_id (&mut self)->SiphashID;
fn now (& self)->B::Time;
fn constants (& self)->& B::Constants;
}
// Access context for predictors; no additional methods yet.
pub trait PredictorAccessor <B: Basics>: Accessor {
}
// Core interface of a time-steward simulation backend.
pub trait TimeSteward <'a, B: Basics> {
type A: Accessor;
type M: Mutator <B>;
type P: PredictorAccessor <B>;
type Event;
type Prediction;
type Predictor;
// The bool return presumably reports whether the event was inserted/erased — TODO document.
fn insert_fiat_event (&mut self, time: B::Time, distinguisher: u64, event: Self::Event)->bool;
fn erase_fiat_event (&mut self, time: B::Time, distinguisher: u64)->bool;
fn accessor_after (&mut self, time: B::Time)->Self::A;
}
// NOTE(review): "flat" presumably means a single current-state snapshot valid
// only within the returned interval — confirm intended semantics.
pub trait FlatTimeSteward <'a, B: Basics>:TimeSteward <'a, B> {
fn valid_strictly_between (& self)->(B::Time, B::Time);
}
// An event is a shared closure that mutates simulation state through a mutator.
type Event <B: Basics, M: Mutator <B>> =Rc <Fn (&mut M)>;
// A predictor's verdict: no event, an event to run immediately, or one scheduled at a time.
enum Prediction <B: Basics, M: Mutator <B>> {
Nothing,
Immediately (Event <B, M>),
At (B::Time, Event <B, M>),
}
// A predictor inspects state through a `PredictorAccessor` and returns a `Prediction`.
type Predictor <B: Basics, M: Mutator <B>, PA: PredictorAccessor <B>> =Rc <for <'b, 'c> Fn (& 'b mut PA)->Prediction <B, M>>;
// Namespaced concrete steward implementation. NOTE(review): module name is not
// snake_case; rustc will warn (`non_snake_case`). Much of this module is WIP and
// does not compile yet (empty stub bodies, bounds on type-alias parameters).
mod StandardFlatTimeSteward {
use super::{SiphashID, FieldID, Field, ExtendedTime, Basics, TimeSteward};
use std::collections::{HashMap, BinaryHeap};
use std::any::Any;
use std::borrow::Borrow;
use std::rc::Rc;
pub struct Steward <'a, B: Basics> {
entity_states: HashMap< FieldID, Box <Any>>,
last_change: B::Time,
fiat_events: HashMap< ExtendedTime <B>, Event <'a, B>>,
predictions: HashMap<ExtendedTime <B>, Prediction <'a, B>>,
upcoming_event_times: BinaryHeap <ExtendedTime <B>>,
}
// Read-only view over a steward.
pub struct Accessor <'a, B: Basics + 'a> {
steward: &'a Steward <'a, B>,
}
// Mutable view over a steward, handed to events.
pub struct Mutator <'a, B: Basics + 'a> {
steward: & 'a mut Steward <'a, B>,
}
// Mutable view over a steward, handed to predictors.
pub struct PredictorAccessor <'a, B: Basics + 'a> {
steward: & 'a mut Steward <'a, B>,
}
// Local shorthands that pin the generic aliases from the parent module to this
// module's mutator/accessor types.
type Event <'a, B: Basics + 'a> = super::Event <B, Mutator <'a, B>>;
type Prediction <'a, B: Basics + 'a> = super::Prediction <B, Mutator <'a, B>>;
type Predictor <'a, B: Basics + 'a> = super::Predictor <B, Mutator <'a, B>, PredictorAccessor <'a, B>>;
impl <'a, B: Basics>Steward <'a, B> {
// WIP stub: empty body cannot return `Self` yet.
fn new (constants: B::Constants, predictors: Vec<Predictor <'a, B>>)->Self {}
}
impl <'a, B: Basics> super::Accessor for Accessor <'a, B> {
// Looks up the boxed value under (entity id, field id) and downcasts it.
// NOTE(review): `downcast` consumes a `Box`, but `get` yields a shared
// reference — likely needs `downcast_ref`. TODO confirm fix.
fn get <Type: Field> (&mut self, id: SiphashID)-> Option <& Type::Data> {
self.steward.entity_states.get (& FieldID {entity: id, field: Type::unique_identifier ()}).map (| something | something.downcast ::<Type::Data> ().unwrap ().borrow ())
}
}
impl <'a, B: Basics> super::Accessor for Mutator <'a, B> {
// Same lookup as the read-only accessor; see the note there.
fn get <Type: Field> (&mut self, id: SiphashID)-> Option <& Type::Data> {
self.steward.entity_states.get (& FieldID {entity: id, field: Type::unique_identifier ()}).map (| something | something.downcast ::<Type::Data> ().unwrap ().borrow ())
}
}
impl <'a, B: Basics> super::TimeSteward <'a, B> for Steward <'a, B> {
type A =Accessor <'a, B>;
type M =Mutator <'a, B>;
type P =PredictorAccessor <'a, B>;
type Event = Event <'a, B>;
// NOTE(review): `super::Prediction`'s second parameter is declared as a
// `Mutator`, but an event type is passed here; similarly `super::Predictor`
// expects (B, M, PA), not (B, P, Prediction) — argument mismatches to resolve.
type Prediction = super::Prediction <B, Self::Event>;
type Predictor = super::Predictor <B, Self::P, Self::Prediction>;
// WIP stubs: the `-> bool` / `-> Self::A` bodies are empty and do not compile yet.
fn insert_fiat_event (&mut self, time: B::Time, distinguisher: u64, event: Self::Event)->bool {}
fn erase_fiat_event (&mut self, time: B::Time, distinguisher: u64)->bool {}
fn accessor_after (&mut self, time: B::Time)->Self::A {}
}
}
#[test]
// Placeholder test: no assertions yet; passing only means the crate compiled.
fn it_works() {
}
|
// Copyright 2016 Kyle Mayes
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! A somewhat idiomatic Rust wrapper for libclang.
//!
//! ## Supported Versions
//!
//! * 3.5 - [Documentation](https://kylemayes.github.io/clang-rs/3_5/clang)
//! * 3.6 - [Documentation](https://kylemayes.github.io/clang-rs/3_6/clang)
//! * 3.7 - [Documentation](https://kylemayes.github.io/clang-rs/3_7/clang)
//! * 3.8 - [Documentation](https://kylemayes.github.io/clang-rs/3_8/clang)
//! * 3.9 - [Documentation](https://kylemayes.github.io/clang-rs/3_9/clang)
//! * 4.0 - [Documentation](https://kylemayes.github.io/clang-rs/4_0/clang)
//! * 5.0 - [Documentation](https://kylemayes.github.io/clang-rs/5_0/clang)
//! * 6.0 - [Documentation](https://kylemayes.github.io/clang-rs/6_0/clang)
//! * 7.0 - [Documentation](https://kylemayes.github.io/clang-rs/7_0/clang)
//! * 8.0 - [Documentation](https://kylemayes.github.io/clang-rs/8_0/clang)
#![warn(missing_copy_implementations, missing_debug_implementations, missing_docs)]
#![allow(non_upper_case_globals)]
extern crate clang_sys;
extern crate libc;
#[macro_use]
mod utility;
pub mod completion;
pub mod diagnostic;
pub mod documentation;
pub mod source;
pub mod token;
pub mod sonar;
use std::cmp;
use std::fmt;
use std::hash;
use std::mem;
use std::ptr;
use std::slice;
use std::collections::{HashMap};
use std::ffi::{CString};
use std::marker::{PhantomData};
use std::path::{Path, PathBuf};
use std::sync::atomic::{self, AtomicBool};
use clang_sys::*;
use libc::{c_int, c_uint, c_ulong};
use completion::{Completer, CompletionString};
use diagnostic::{Diagnostic};
use documentation::{Comment};
use source::{File, Module, SourceLocation, SourceRange};
use token::{Token};
use utility::{FromError, Nullable};
mod error;
pub use self::error::*;
//================================================
// Enums
//================================================
// Accessibility _________________________________
/// Indicates the accessibility of a declaration or base class specifier.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum Accessibility {
    /// The declaration or base class specifier is public.
    Public = 1,
    /// The declaration or base class specifier is protected.
    Protected = 2,
    /// The declaration or base class specifier is private.
    Private = 3,
}
// Availability __________________________________
/// Indicates the availability of an AST entity.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum Availability {
    /// The entity is available.
    Available = 0,
    /// The entity is available but deprecated; any usage of it will be a warning.
    Deprecated = 1,
    /// The entity is not available; any usage of it will be an error.
    Unavailable = 2,
    /// The entity is available but not accessible; any usage of it will be an error.
    Inaccessible = 3,
}
// CallingConvention _____________________________
/// Indicates the calling convention specified for a function type.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum CallingConvention {
/// The function type uses a calling convention that is not exposed via this interface.
Unexposed = 200,
/// The function type uses the x86 `cdecl` calling convention.
Cdecl = 1,
/// The function type uses the x86 `fastcall` calling convention.
Fastcall = 3,
/// The function type uses the x86 `pascal` calling convention.
Pascal = 5,
/// The function type uses the x86 `stdcall` calling convention.
Stdcall = 2,
/// The function type uses the x86 `thiscall` calling convention.
Thiscall = 4,
/// The function type uses the x86 `vectorcall` calling convention.
///
/// Only produced by `libclang` 3.6 and later.
Vectorcall = 12,
/// The function type uses the calling convention for the Swift programming language.
///
/// Only produced by `libclang` 3.9 and later.
Swift = 13,
/// The function type uses a calling convention that preserves most registers.
///
/// Only produced by `libclang` 3.9 and later.
PreserveMost = 14,
/// The function type uses a calling convention that preserves nearly all registers.
///
/// Only produced by `libclang` 3.9 and later.
PreserveAll = 15,
/// The function type uses the ARM AAPCS calling convention.
Aapcs = 6,
/// The function type uses the ARM AAPCS-VFP calling convention.
AapcsVfp = 7,
/// The function type uses the calling convention for Intel OpenCL built-ins.
IntelOcl = 9,
/// The function type uses a calling convention that passes as many values in registers as
/// possible.
///
/// Only produced by `libclang` 4.0 and later.
RegCall = 8,
/// The function type uses the x64 C calling convention as specified in the System V ABI.
SysV64 = 11,
/// The function type uses the x64 C calling convention as implemented on Windows.
Win64 = 10,
}
// EntityKind ____________________________________
/// Indicates the categorization of an AST entity.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum EntityKind {
/// A declaration whose specific type is not exposed via this interface.
UnexposedDecl = 1,
/// A C or C++ struct.
StructDecl = 2,
/// A C or C++ union.
UnionDecl = 3,
/// A C++ class.
ClassDecl = 4,
/// An enum.
EnumDecl = 5,
/// A C field or C++ non-static data member in a struct, union, or class.
FieldDecl = 6,
/// An enum constant.
EnumConstantDecl = 7,
/// A function.
FunctionDecl = 8,
/// A variable.
VarDecl = 9,
/// A parameter.
ParmDecl = 10,
/// An Objective-C `@interface`.
ObjCInterfaceDecl = 11,
/// An Objective-C `@interface` for a category.
ObjCCategoryDecl = 12,
/// An Objective-C `@protocol` declaration.
ObjCProtocolDecl = 13,
/// An Objective-C `@property` declaration.
ObjCPropertyDecl = 14,
/// An Objective-C instance variable.
ObjCIvarDecl = 15,
/// An Objective-C instance method.
ObjCInstanceMethodDecl = 16,
/// An Objective-C class method.
ObjCClassMethodDecl = 17,
/// An Objective-C `@implementation`.
ObjCImplementationDecl = 18,
/// An Objective-C `@implementation` for a category.
ObjCCategoryImplDecl = 19,
/// A typedef.
TypedefDecl = 20,
/// A C++ method.
Method = 21,
/// A C++ namespace.
Namespace = 22,
/// A linkage specification (e.g., `extern "C"`).
LinkageSpec = 23,
/// A C++ constructor.
Constructor = 24,
/// A C++ destructor.
Destructor = 25,
/// A C++ conversion function.
ConversionFunction = 26,
/// A C++ template type parameter.
TemplateTypeParameter = 27,
/// A C++ template non-type parameter.
NonTypeTemplateParameter = 28,
/// A C++ template template parameter.
TemplateTemplateParameter = 29,
/// A C++ function template.
FunctionTemplate = 30,
/// A C++ class template.
ClassTemplate = 31,
/// A C++ class template partial specialization.
ClassTemplatePartialSpecialization = 32,
/// A C++ namespace alias declaration.
NamespaceAlias = 33,
/// A C++ using directive.
UsingDirective = 34,
/// A C++ using declaration.
UsingDeclaration = 35,
/// A C++ type alias declaration.
TypeAliasDecl = 36,
/// An Objective-C `@synthesize` definition.
ObjCSynthesizeDecl = 37,
/// An Objective-C `@dynamic` definition.
ObjCDynamicDecl = 38,
/// An access specifier.
AccessSpecifier = 39,
/// A reference to a super class in Objective-C.
ObjCSuperClassRef = 40,
/// A reference to a protocol in Objective-C.
ObjCProtocolRef = 41,
/// A reference to a class in Objective-C.
ObjCClassRef = 42,
/// A reference to a type declaration.
TypeRef = 43,
/// A base class specifier.
BaseSpecifier = 44,
/// A reference to a class template, function template, template template parameter, or class
/// template partial specialization.
TemplateRef = 45,
/// A reference to a namespace or namespace alias.
NamespaceRef = 46,
/// A reference to a member of a struct, union, or class that occurs in some non-expression
/// context.
MemberRef = 47,
/// A reference to a labeled statement.
LabelRef = 48,
/// A reference to a set of overloaded functions or function templates that has not yet been
/// resolved to a specific function or function template.
OverloadedDeclRef = 49,
/// A reference to a variable that occurs in some non-expression context.
VariableRef = 50,
/// An expression whose specific kind is not exposed via this interface.
UnexposedExpr = 100,
/// An expression that refers to some value declaration, such as a function or enumerator.
DeclRefExpr = 101,
/// An expression that refers to the member of a struct, union, or class.
MemberRefExpr = 102,
/// An expression that calls a function.
CallExpr = 103,
/// An expression that sends a message to an Objective-C object or class.
ObjCMessageExpr = 104,
/// An expression that represents a block literal.
BlockExpr = 105,
/// An integer literal.
IntegerLiteral = 106,
/// A floating point number literal.
FloatingLiteral = 107,
/// An imaginary number literal.
ImaginaryLiteral = 108,
/// A string literal.
StringLiteral = 109,
/// A character literal.
CharacterLiteral = 110,
/// A parenthesized expression.
ParenExpr = 111,
/// Any unary expression other than `sizeof` and `alignof`.
UnaryOperator = 112,
/// An array subscript expression (`[C99 6.5.2.1]`).
ArraySubscriptExpr = 113,
/// A built-in binary expression (e.g., `x + y`).
BinaryOperator = 114,
/// A compound assignment expression (e.g., `x += y`).
CompoundAssignOperator = 115,
/// A ternary expression.
ConditionalOperator = 116,
/// An explicit cast in C or a C-style cast in C++.
CStyleCastExpr = 117,
/// A compound literal expression (`[C99 6.5.2.5]`).
CompoundLiteralExpr = 118,
/// A C or C++ initializer list.
InitListExpr = 119,
/// A GNU address of label expression.
AddrLabelExpr = 120,
/// A GNU statement expression.
StmtExpr = 121,
/// A C11 generic selection expression.
GenericSelectionExpr = 122,
/// A GNU `__null` expression.
GNUNullExpr = 123,
/// A C++ `static_cast<>` expression.
StaticCastExpr = 124,
/// A C++ `dynamic_cast<>` expression.
DynamicCastExpr = 125,
/// A C++ `reinterpret_cast<>` expression.
ReinterpretCastExpr = 126,
/// A C++ `const_cast<>` expression.
ConstCastExpr = 127,
/// A C++ cast that uses "function" notation (e.g., `int(0.5)`).
FunctionalCastExpr = 128,
/// A C++ `typeid` expression.
TypeidExpr = 129,
/// A C++ boolean literal.
BoolLiteralExpr = 130,
/// A C++ `nullptr` expression.
NullPtrLiteralExpr = 131,
/// A C++ `this` expression.
ThisExpr = 132,
/// A C++ `throw` expression.
ThrowExpr = 133,
/// A C++ `new` expression.
NewExpr = 134,
/// A C++ `delete` expression.
DeleteExpr = 135,
/// A unary expression.
UnaryExpr = 136,
/// An Objective-C string literal.
ObjCStringLiteral = 137,
/// An Objective-C `@encode` expression.
ObjCEncodeExpr = 138,
/// An Objective-C `@selector` expression.
ObjCSelectorExpr = 139,
/// An Objective-C `@protocol` expression.
ObjCProtocolExpr = 140,
/// An Objective-C bridged cast expression.
ObjCBridgedCastExpr = 141,
/// A C++11 parameter pack expansion expression.
PackExpansionExpr = 142,
/// A C++11 `sizeof...` expression.
SizeOfPackExpr = 143,
/// A C++11 lambda expression.
LambdaExpr = 144,
/// An Objective-C boolean literal.
ObjCBoolLiteralExpr = 145,
/// An Objective-C `self` expression.
ObjCSelfExpr = 146,
/// An OpenMP array section expression.
///
/// Only produced by `libclang` 3.8 and later.
OmpArraySectionExpr = 147,
/// An Objective-C availability check expression (e.g., `@available(macos 10.10, *)`).
///
/// Only produced by `libclang` 3.9 and later.
ObjCAvailabilityCheckExpr = 148,
/// A fixed-point literal.
///
/// Only produced by `libclang` 7.0 and later.
FixedPointLiteral = 149,
/// A statement whose specific kind is not exposed via this interface.
UnexposedStmt = 200,
/// A labelled statement in a function.
LabelStmt = 201,
/// A group of statements (e.g., a function body).
CompoundStmt = 202,
/// A `case` statement.
CaseStmt = 203,
/// A `default` statement.
DefaultStmt = 204,
/// An `if` statement.
IfStmt = 205,
/// A `switch` statement.
SwitchStmt = 206,
/// A `while` statement.
WhileStmt = 207,
/// A `do` statement.
DoStmt = 208,
/// A `for` statement.
ForStmt = 209,
/// A `goto` statement.
GotoStmt = 210,
/// An indirect `goto` statement.
IndirectGotoStmt = 211,
/// A `continue` statement.
ContinueStmt = 212,
/// A `break` statement.
BreakStmt = 213,
/// A `return` statement.
ReturnStmt = 214,
/// An inline assembly statement.
AsmStmt = 215,
/// An Objective-C `@try`-`@catch`-`@finally` statement.
ObjCAtTryStmt = 216,
/// An Objective-C `@catch` statement.
ObjCAtCatchStmt = 217,
/// An Objective-C `@finally` statement.
ObjCAtFinallyStmt = 218,
/// An Objective-C `@throw` statement.
ObjCAtThrowStmt = 219,
/// An Objective-C `@synchronized` statement.
ObjCAtSynchronizedStmt = 220,
/// An Objective-C autorelease pool statement.
ObjCAutoreleasePoolStmt = 221,
/// An Objective-C collection statement.
ObjCForCollectionStmt = 222,
/// A C++ catch statement.
CatchStmt = 223,
/// A C++ try statement.
TryStmt = 224,
/// A C++11 range-based for statement.
ForRangeStmt = 225,
/// A Windows Structured Exception Handling `__try` statement.
SehTryStmt = 226,
/// A Windows Structured Exception Handling `__except` statement.
SehExceptStmt = 227,
/// A Windows Structured Exception Handling `__finally` statement.
SehFinallyStmt = 228,
/// A Windows Structured Exception Handling `__leave` statement.
SehLeaveStmt = 247,
/// A Microsoft inline assembly statement.
MsAsmStmt = 229,
/// A null statement.
NullStmt = 230,
/// An adaptor for mixing declarations with statements and expressions.
DeclStmt = 231,
/// An OpenMP parallel directive.
OmpParallelDirective = 232,
/// An OpenMP SIMD directive.
OmpSimdDirective = 233,
/// An OpenMP for directive.
OmpForDirective = 234,
/// An OpenMP sections directive.
OmpSectionsDirective = 235,
/// An OpenMP section directive.
OmpSectionDirective = 236,
/// An OpenMP single directive.
OmpSingleDirective = 237,
/// An OpenMP parallel for directive.
OmpParallelForDirective = 238,
/// An OpenMP parallel sections directive.
OmpParallelSectionsDirective = 239,
/// An OpenMP task directive.
OmpTaskDirective = 240,
/// An OpenMP master directive.
OmpMasterDirective = 241,
/// An OpenMP critical directive.
OmpCriticalDirective = 242,
/// An OpenMP taskyield directive.
OmpTaskyieldDirective = 243,
/// An OpenMP barrier directive.
OmpBarrierDirective = 244,
/// An OpenMP taskwait directive.
OmpTaskwaitDirective = 245,
/// An OpenMP flush directive.
OmpFlushDirective = 246,
/// An OpenMP ordered directive.
///
/// Only produced by `libclang` 3.6 and later.
OmpOrderedDirective = 248,
/// An OpenMP atomic directive.
///
/// Only produced by `libclang` 3.6 and later.
OmpAtomicDirective = 249,
/// An OpenMP for SIMD directive.
///
/// Only produced by `libclang` 3.6 and later.
OmpForSimdDirective = 250,
/// An OpenMP parallel for SIMD directive.
///
/// Only produced by `libclang` 3.6 and later.
OmpParallelForSimdDirective = 251,
/// An OpenMP target directive.
///
/// Only produced by `libclang` 3.6 and later.
OmpTargetDirective = 252,
/// An OpenMP teams directive.
///
/// Only produced by `libclang` 3.6 and later.
OmpTeamsDirective = 253,
/// An OpenMP taskgroup directive.
///
/// Only produced by `libclang` 3.7 and later.
OmpTaskgroupDirective = 254,
/// An OpenMP cancellation point directive.
///
/// Only produced by `libclang` 3.7 and later.
OmpCancellationPointDirective = 255,
/// An OpenMP cancel directive.
///
/// Only produced by `libclang` 3.7 and later.
OmpCancelDirective = 256,
/// An OpenMP target data directive.
///
/// Only produced by `libclang` 3.8 and later.
OmpTargetDataDirective = 257,
/// An OpenMP task loop directive.
///
/// Only produced by `libclang` 3.8 and later.
OmpTaskLoopDirective = 258,
/// An OpenMP task loop SIMD directive.
///
/// Only produced by `libclang` 3.8 and later.
OmpTaskLoopSimdDirective = 259,
/// An OpenMP distribute directive.
///
/// Only produced by `libclang` 3.8 and later.
OmpDistributeDirective = 260,
/// An OpenMP target enter data directive.
///
/// Only produced by `libclang` 3.9 and later.
OmpTargetEnterDataDirective = 261,
/// An OpenMP target exit data directive.
///
/// Only produced by `libclang` 3.9 and later.
OmpTargetExitDataDirective = 262,
/// An OpenMP target parallel directive.
///
/// Only produced by `libclang` 3.9 and later.
OmpTargetParallelDirective = 263,
/// An OpenMP target parallel for directive.
///
/// Only produced by `libclang` 3.9 and later.
OmpTargetParallelForDirective = 264,
/// An OpenMP target update directive.
///
/// Only produced by `libclang` 3.9 and later.
OmpTargetUpdateDirective = 265,
/// An OpenMP distribute parallel for directive.
///
/// Only produced by `libclang` 3.9 and later.
OmpDistributeParallelForDirective = 266,
/// An OpenMP distribute parallel for SIMD directive.
///
/// Only produced by `libclang` 3.9 and later.
OmpDistributeParallelForSimdDirective = 267,
/// An OpenMP distribute SIMD directive.
///
/// Only produced by `libclang` 3.9 and later.
OmpDistributeSimdDirective = 268,
/// An OpenMP target parallel for SIMD directive.
///
/// Only produced by `libclang` 3.9 and later.
OmpTargetParallelForSimdDirective = 269,
/// An OpenMP target SIMD directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTargetSimdDirective = 270,
/// An OpenMP teams distribute directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTeamsDistributeDirective = 271,
/// An OpenMP teams distribute SIMD directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTeamsDistributeSimdDirective = 272,
/// An OpenMP teams distribute parallel for SIMD directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTeamsDistributeParallelForSimdDirective = 273,
/// An OpenMP teams distribute parallel for directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTeamsDistributeParallelForDirective = 274,
/// An OpenMP target teams directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTargetTeamsDirective = 275,
/// An OpenMP target teams distribute directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTargetTeamsDistributeDirective = 276,
/// An OpenMP target teams distribute parallel for directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTargetTeamsDistributeParallelForDirective = 277,
/// An OpenMP target teams distribute parallel for SIMD directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTargetTeamsDistributeParallelForSimdDirective = 278,
/// An OpenMP target teams distribute SIMD directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTargetTeamsDistributeSimdDirective = 279,
/// The top-level AST entity which acts as the root for the other entities.
TranslationUnit = 300,
/// An attribute whose specific kind is not exposed via this interface.
UnexposedAttr = 400,
/// An attribute applied to an Objective-C IBAction.
IbActionAttr = 401,
/// An attribute applied to an Objective-C IBOutlet.
IbOutletAttr = 402,
/// An attribute applied to an Objective-C IBOutletCollection.
IbOutletCollectionAttr = 403,
/// The `final` attribute.
FinalAttr = 404,
/// The `override` attribute.
OverrideAttr = 405,
/// An annotation attribute.
AnnotateAttr = 406,
/// An ASM label attribute.
AsmLabelAttr = 407,
/// An attribute that requests for packed records (e.g., `__attribute__ ((__packed__))`).
PackedAttr = 408,
/// An attribute that asserts a function has no side effects (e.g., `__attribute__((pure))`).
PureAttr = 409,
/// The `const` attribute.
ConstAttr = 410,
/// An attribute that prevents calls to a function from being duplicated by the optimizer
/// (e.g., `__attribute__((noduplicate))`).
NoDuplicateAttr = 411,
/// A CUDA constant attribute.
CudaConstantAttr = 412,
/// A CUDA device attribute.
CudaDeviceAttr = 413,
/// A CUDA global attribute.
CudaGlobalAttr = 414,
/// A CUDA host attribute.
CudaHostAttr = 415,
/// A CUDA shared attribute.
///
/// Only produced by `libclang` 3.6 and later.
CudaSharedAttr = 416,
/// A linker visibility attribute.
///
/// Only produced by `libclang` 3.8 and later.
VisibilityAttr = 417,
/// A MSVC DLL export attribute.
///
/// Only produced by `libclang` 3.8 and later.
DllExport = 418,
/// A MSVC DLL import attribute.
///
/// Only produced by `libclang` 3.8 and later.
DllImport = 419,
/// `__attribute__((ns_returns_retained))`
///
/// Only produced by `libclang` 8.0 and later.
NSReturnsRetained = 420,
/// `__attribute__((ns_returns_not_retained))`
///
/// Only produced by `libclang` 8.0 and later.
NSReturnsNotRetained = 421,
/// `__attribute__((ns_returns_autoreleased))`
///
/// Only produced by `libclang` 8.0 and later.
NSReturnsAutoreleased = 422,
/// `__attribute__((ns_consumes_self))`
///
/// Only produced by `libclang` 8.0 and later.
NSConsumesSelf = 423,
/// `__attribute__((ns_consumed))`
///
/// Only produced by `libclang` 8.0 and later.
NSConsumed = 424,
/// `__attribute__((objc_exception))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCException = 425,
/// `__attribute__((NSObject))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCNSObject = 426,
/// `__attribute__((objc_independent_class))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCIndependentClass = 427,
/// `__attribute__((objc_precise_lifetime))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCPreciseLifetime = 428,
/// `__attribute__((objc_returns_inner_pointer))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCReturnsInnerPointer = 429,
/// `__attribute__((objc_requires_super))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCRequiresSuper = 430,
/// `__attribute__((objc_root_class))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCRootClass = 431,
/// `__attribute__((objc_subclassing_restricted))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCSubclassingRestricted = 432,
/// `__attribute__((objc_protocol_requires_explicit_implementation))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCExplicitProtocolImpl = 433,
/// `__attribute__((objc_designated_initializer))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCDesignatedInitializer = 434,
/// `__attribute__((objc_runtime_visible))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCRuntimeVisible = 435,
/// `__attribute__((objc_boxable))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCBoxable = 436,
/// `__attribute__((flag_enum))`
///
/// Only produced by `libclang` 8.0 and later.
FlagEnum = 437,
/// A preprocessing directive.
PreprocessingDirective = 500,
/// A macro definition.
MacroDefinition = 501,
/// A macro expansion.
MacroExpansion = 502,
/// An inclusion directive.
InclusionDirective = 503,
/// A module import declaration.
ModuleImportDecl = 600,
/// A C++11 alias template declaration (e.g., `template <typename T> using M = std::map<T, T>`).
///
/// Only produced by `libclang` 3.8 and later.
TypeAliasTemplateDecl = 601,
/// A `static_assert` node.
///
/// Only produced by `libclang` 3.9 and later.
StaticAssert = 602,
/// A friend declaration.
///
/// Only produced by `libclang` 4.0 and later.
FriendDecl = 603,
/// A single overload in a set of overloads.
///
/// Only produced by `libclang` 3.7 and later.
OverloadCandidate = 700,
}
// EntityVisitResult _____________________________
/// Indicates how an entity visitation should proceed.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum EntityVisitResult {
/// Do not continue visiting entities.
Break = 0,
/// Continue visiting sibling entities iteratively, skipping child entities.
Continue = 1,
/// Continue visiting sibling and child entities recursively, children first.
Recurse = 2,
}
// EvaluationResult ______________________________
/// The result of evaluating an expression.
#[cfg(feature="gte_clang_3_9")]
#[derive(Clone, Debug, PartialEq)]
// String-like variants carry owned, NUL-terminated C strings (`std::ffi::CString`).
pub enum EvaluationResult {
/// An evaluation result whose specific type is not exposed via this interface.
Unexposed,
/// A signed integer evaluation result.
SignedInteger(i64),
/// An unsigned integer evaluation result.
///
/// Only produced by `libclang` 4.0 and later. Earlier versions will always return
/// `SignedInteger` for integers.
UnsignedInteger(u64),
/// A floating point number evaluation result.
Float(f64),
/// A string literal evaluation result.
String(CString),
/// An Objective-C string literal evaluation result.
ObjCString(CString),
/// An Objective-C `CFString` evaluation result.
CFString(CString),
/// Any other evaluation result whose value can be represented by a string.
Other(CString),
}
// ExceptionSpecification ________________________
/// Indicates the exception specification of a function.
#[cfg(feature="gte_clang_5_0")]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum ExceptionSpecification {
    /// The function has a `throw()` specification.
    DynamicNone = 1,
    /// The function has a `throw(T1, T2)` specification.
    Dynamic = 2,
    /// The function has a `throw(...)` specification.
    DynamicAny = 3,
    /// The function has a basic `noexcept` specification.
    BasicNoexcept = 4,
    /// The function has a computed `noexcept` specification.
    ComputedNoexcept = 5,
    /// The function has an exception specification that has not yet been evaluated.
    Unevaluated = 6,
    /// The function has an exception specification that has not yet been instantiated.
    Uninstantiated = 7,
    /// The function has an exception specification that has not yet been parsed.
    Unparsed = 8,
}
// Language ______________________________________
/// Indicates the language used by a declaration.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum Language {
    /// The declaration uses the C programming language.
    C = 1,
    /// The declaration uses the Objective-C programming language.
    ObjectiveC = 2,
    /// The declaration uses the C++ programming language.
    Cpp = 3,
    /// The declaration uses the Swift programming language.
    ///
    /// Only produced by `libclang` 5.0 and later.
    Swift = 4,
}
// Linkage _______________________________________
/// Indicates the linkage of an AST entity.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum Linkage {
    /// The AST entity has automatic storage (e.g., variables or parameters).
    Automatic = 1,
    /// The AST entity is a static variable or static function.
    Internal = 2,
    /// The AST entity has external linkage and lives in a C++ anonymous namespace.
    UniqueExternal = 3,
    /// The AST entity has external linkage.
    External = 4,
}
// MemoryUsage ___________________________________
/// Indicates the usage category of a quantity of memory.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum MemoryUsage {
    /// Expressions, declarations, and types.
    Ast = 1,
    /// Identifiers.
    Identifiers = 2,
    /// Selectors.
    Selectors = 3,
    /// Cached global code completion results.
    GlobalCodeCompletionResults = 4,
    /// The content cache used by the source manager.
    SourceManagerContentCache = 5,
    /// Various tables used by the AST.
    AstSideTables = 6,
    /// Memory allocated with `malloc` for the source manager.
    SourceManagerMalloc = 7,
    /// Memory allocated with `mmap` for the source manager.
    SourceManagerMMap = 8,
    /// Memory allocated with `malloc` for external AST sources.
    ExternalAstSourceMalloc = 9,
    /// Memory allocated with `mmap` for external AST sources.
    ExternalAstSourceMMap = 10,
    /// Memory allocated with `malloc` for the preprocessor.
    Preprocessor = 11,
    /// The preprocessing record.
    PreprocessingRecord = 12,
    /// Data structures used by the source manager.
    SourceManagerDataStructures = 13,
    /// Header search tables.
    PreprocessorHeaderSearch = 14,
}
// Nullability ___________________________________
/// Indicates the nullability of a pointer type.
#[cfg(feature="gte_clang_8_0")]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
// NOTE(review): `#[repr(C)]` plus explicit discriminants suggests these values
// mirror the corresponding `libclang` enum — confirm against `clang-sys`.
pub enum Nullability {
/// Values of this type can never be null.
NonNull = 0,
/// Values of this type can be null.
Nullable = 1,
/// Whether values of this type can be null is (explicitly) unspecified.
Unspecified = 2,
}
// PrintingPolicyFlag ____________________________
/// Flags for the printing policy.
#[cfg(feature="gte_clang_7_0")]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum PrintingPolicyFlag {
/// Whether to suppress printing specifiers for a given type or declaration.
SuppressSpecifiers = 1,
/// Whether to suppress printing the tag keyword.
SuppressTagKeyword = 2,
/// Whether to include the body of a tag definition.
IncludeTagDefinition = 3,
/// Whether to suppress printing of scope specifiers.
SuppressScope = 4,
/// Whether to suppress printing the parts of scope specifiers that don't need to be written.
SuppressUnwrittenScope = 5,
/// Whether to suppress printing of variable initializers.
SuppressInitializers = 6,
/// Whether to print the size of constant array expressions as written.
PrintConstantArraySizeAsWritten = 7,
/// Whether to print the location of anonymous tags.
PrintAnonymousTagLocations = 8,
/// Whether to suppress printing the __strong lifetime qualifier in ARC.
SuppressStrongLifetime = 9,
/// Whether to suppress printing lifetime qualifiers in ARC.
SuppressLifetimeQualifiers = 10,
/// Whether to suppress printing template arguments in names of C++ constructors.
SuppressTemplateArgsInCXXConstructors = 11,
/// Whether to print 'bool' rather than '_Bool'.
UseBool = 12,
/// Whether to print 'restrict' rather than '__restrict'
UseRestrict = 13,
/// Whether to print 'alignof' rather than '__alignof'
UseAlignof = 14,
/// Whether to print '_Alignof' rather than '__alignof'
UseUnderscoreAlignof = 15,
/// Whether to print '(void)' rather than '()' for a function prototype with zero parameters.
UseVoidForZeroParams = 16,
/// Whether to print terse output.
UseTerseOutput = 17,
/// Whether to do certain refinements needed for producing a proper declaration tag.
PolishForDeclaration = 18,
/// Whether to print 'half' rather than '__fp16'
UseHalf = 19,
/// Whether to print the built-in wchar_t type as '__wchar_t'
UseMsWchar = 20,
/// Whether to include newlines after statements.
IncludeNewlines = 21,
/// Whether to use whitespace and punctuation like MSVC does.
UseMsvcFormatting = 22,
/// Whether to print constant expressions as written.
PrintConstantsAsWritten = 23,
/// Whether to suppress printing the implicit 'self' or 'this' expressions.
SuppressImplicitBase = 24,
/// Whether to print the fully qualified name of function declarations.
PrintFullyQualifiedName = 25,
}
// RefQualifier __________________________________
/// Indicates the ref qualifier of a C++ function or method type.
#[cfg_attr(feature="cargo-clippy", allow(enum_variant_names))]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum RefQualifier {
    /// An l-value ref qualifier (`&`) on the function or method.
    LValue = 1,
    /// An r-value ref qualifier (`&&`) on the function or method.
    RValue = 2,
}
// StorageClass __________________________________
/// Indicates the storage class of a declaration.
#[cfg(feature="gte_clang_3_6")]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum StorageClass {
/// The declaration does not specify a storage duration and therefore has an automatic storage
/// duration.
None = 1,
/// The declaration specifies an automatic storage duration.
Auto = 6,
/// The declaration specifies an automatic storage duration and that it should be stored in a
/// CPU register.
Register = 7,
/// The declaration specifies a static storage duration and internal linkage.
Static = 3,
/// The declaration specifies a static storage duration and external linkage.
Extern = 2,
/// The declaration specifies a static storage duration and external linkage but is not
/// accessible outside the containing translation unit.
PrivateExtern = 4,
/// The declaration specifies a storage duration related to an OpenCL work group.
OpenClWorkGroupLocal = 5,
}
// TemplateArgument ______________________________
/// An argument to a template function specialization.
#[cfg(feature="gte_clang_3_6")]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum TemplateArgument<'tu> {
/// A declaration for a pointer, reference, or member pointer non-type template parameter.
Declaration,
/// An expression that has not yet been resolved.
Expression,
/// An empty template argument (e.g., one that has not yet been deduced).
Null,
/// A null pointer or null member pointer provided for a non-type template parameter.
Nullptr,
/// A parameter pack.
Pack,
/// A name for a template provided for a template template parameter.
Template,
/// A pack expansion of a name for a template provided for a template template parameter.
TemplateExpansion,
/// An integer.
// NOTE(review): the two payloads are presumably the signed and unsigned
// renderings of the same value — confirm at the construction site.
Integral(i64, u64),
/// A type.
Type(Type<'tu>),
}
// TlsKind _______________________________________
/// Indicates the thread-local storage (TLS) kind of a declaration.
//
// `#[repr(C)]` with explicit discriminants matching `libclang`'s `CXTLSKind`
// values (the "none" value is mapped to `None` by `Entity::get_tls_kind`).
#[cfg(feature="gte_clang_6_0")]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum TlsKind {
    /// The declaration uses dynamic TLS.
    Dynamic = 1,
    /// The declaration uses static TLS.
    Static = 2,
}
// TypeKind ______________________________________
/// Indicates the categorization of a type.
//
// `#[repr(C)]` with explicit discriminants matching `libclang`'s `CXTypeKind`
// values, which is why the variants are not in ascending numeric order: they
// are grouped here by meaning (signed/unsigned pairs, etc.) instead. Do not
// renumber or reorder-with-new-values — FFI values are transmuted into this
// enum.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum TypeKind {
    /// A type whose specific kind is not exposed via this interface.
    Unexposed = 1,
    /// `void`
    Void = 2,
    /// `bool` (C++) or `_Bool` (C99)
    Bool = 3,
    /// The `char` type when it is signed by default.
    CharS = 13,
    /// The `char` type when it is unsigned by default.
    CharU = 4,
    /// `signed char`
    SChar = 14,
    /// `unsigned char`
    UChar = 5,
    /// `wchar_t`
    WChar = 15,
    /// `char16_t`
    Char16 = 6,
    /// `char32_t`
    Char32 = 7,
    /// `short`
    Short = 16,
    /// `unsigned short`
    UShort = 8,
    /// `int`
    Int = 17,
    /// `unsigned int`
    UInt = 9,
    /// `long`
    Long = 18,
    /// `unsigned long`
    ULong = 10,
    /// `long long`
    LongLong = 19,
    /// `unsigned long long`
    ULongLong = 11,
    /// `__int128_t`
    Int128 = 20,
    /// `__uint128_t`
    UInt128 = 12,
    /// A half-precision (16-bit) floating point type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    Half = 31,
    /// A half-precision (16-bit) floating point type (the C `_Float16` type).
    ///
    /// Only produced by `libclang` 6.0 and later.
    Float16 = 32,
    /// `short _Accum`
    ///
    /// Only produced by `libclang` 7.0 and later.
    ShortAccum = 33,
    /// `_Accum`
    ///
    /// Only produced by `libclang` 7.0 and later.
    Accum = 34,
    /// `long _Accum`
    ///
    /// Only produced by `libclang` 7.0 and later.
    LongAccum = 35,
    /// `unsigned short _Accum`
    ///
    /// Only produced by `libclang` 7.0 and later.
    UShortAccum = 36,
    /// `unsigned _Accum`
    ///
    /// Only produced by `libclang` 7.0 and later.
    UAccum = 37,
    /// `unsigned long _Accum`
    ///
    /// Only produced by `libclang` 7.0 and later.
    ULongAccum = 38,
    /// `float`
    Float = 21,
    /// `double`
    Double = 22,
    /// `long double`
    LongDouble = 23,
    /// `nullptr_t` (C++11)
    Nullptr = 24,
    /// A C99 complex type (e.g., `_Complex float`).
    Complex = 100,
    /// An unknown dependent type.
    Dependent = 26,
    /// The type of an unresolved overload set.
    Overload = 25,
    /// `id` (Objective-C)
    ObjCId = 27,
    /// `Class` (Objective-C)
    ObjCClass = 28,
    /// `SEL` (Objective-C)
    ObjCSel = 29,
    /// `__float128`
    ///
    /// Only produced by `libclang` 3.9 and later.
    Float128 = 30,
    /// An Objective-C interface type.
    ObjCInterface = 108,
    /// An Objective-C pointer to object type.
    ObjCObjectPointer = 109,
    /// A pointer type.
    Pointer = 101,
    /// A block pointer type (e.g., `void (^)(int)`).
    BlockPointer = 102,
    /// A pointer to a record member type.
    MemberPointer = 117,
    /// An l-value reference (e.g. `int&`).
    LValueReference = 103,
    /// An r-value reference (e.g. `int&&`).
    RValueReference = 104,
    /// An enum type.
    Enum = 106,
    /// A record type such as a struct or a class.
    Record = 105,
    /// A typedef.
    Typedef = 107,
    /// A function prototype with parameter type information (e.g., `void foo(int)`).
    FunctionPrototype = 111,
    /// A function prototype without parameter type information (e.g., `void foo()`).
    FunctionNoPrototype = 110,
    /// An array type with a specified size that is an integer constant expression.
    ConstantArray = 112,
    /// An array type with a specified size that is a dependent value.
    DependentSizedArray = 116,
    /// An array type without a specified size.
    IncompleteArray = 114,
    /// An array type with a specified size that is not an integer constant expression.
    VariableArray = 115,
    /// A GCC generic vector type.
    Vector = 113,
    /// A C++11 `decltype(auto)` type.
    ///
    /// Only produced by `libclang` 3.8 and later.
    Auto = 118,
    /// A type that was referred to using an elaborated type keyword (e.g., `struct S`).
    ///
    /// Only produced by `libclang` 3.9 and later.
    Elaborated = 119,
    /// An OpenCL pipe type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    Pipe = 120,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage1dRO = 121,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage1dArrayRO = 122,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage1dBufferRO = 123,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dRO = 124,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayRO = 125,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dDepthRO = 126,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayDepthRO = 127,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dMSAARO = 128,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayMSAARO = 129,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dMSAADepthRO = 130,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayMSAADepthRO = 131,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage3dRO = 132,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage1dWO = 133,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage1dArrayWO = 134,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage1dBufferWO = 135,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dWO = 136,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayWO = 137,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dDepthWO = 138,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayDepthWO = 139,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dMSAAWO = 140,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayMSAAWO = 141,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dMSAADepthWO = 142,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayMSAADepthWO = 143,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage3dWO = 144,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage1dRW = 145,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage1dArrayRW = 146,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage1dBufferRW = 147,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dRW = 148,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayRW = 149,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dDepthRW = 150,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayDepthRW = 151,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dMSAARW = 152,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayMSAARW = 153,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dMSAADepthRW = 154,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayMSAADepthRW = 155,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage3dRW = 156,
    /// An OpenCL sampler type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLSampler = 157,
    /// An OpenCL event type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLEvent = 158,
    /// An OpenCL queue type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLQueue = 159,
    /// An OpenCL reserve ID type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLReserveID = 160,
    /// An Objective-C object type.
    ///
    /// Only produced by `libclang` 8.0 and later.
    ObjCObject = 161,
    /// An Objective-C type param.
    ///
    /// Only produced by `libclang` 8.0 and later.
    ObjCTypeParam = 162,
    /// An attributed type.
    ///
    /// Only produced by `libclang` 8.0 and later.
    Attributed = 163,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCMcePayload = 164,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCImePayload = 165,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCRefPayload = 166,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCSicPayload = 167,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCMceResult = 168,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCImeResult = 169,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCRefResult = 170,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCSicResult = 171,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCImeResultSingleRefStreamout = 172,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCImeResultDualRefStreamout = 173,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCImeSingleRefStreamin = 174,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCImeDualRefStreamin = 175,
}
// Visibility ____________________________________
/// Indicates the linker visibility of an AST element.
//
// `#[repr(C)]` with explicit discriminants matching `libclang`'s
// `CXVisibilityKind` values; the "invalid" value is mapped to `None` by
// `Entity::get_visibility`.
#[cfg(feature="gte_clang_3_8")]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum Visibility {
    /// The AST element can be seen by the linker.
    Default = 3,
    /// The AST element cannot be seen by the linker.
    Hidden = 1,
    /// The AST element can be seen by the linker but resolves to a symbol inside this object.
    Protected = 2,
}
//================================================
// Structs
//================================================
// Clang _________________________________________
// `true` while no `Clang` instance exists; atomically flipped to `false` for
// the lifetime of each instance (see `Clang::new` and the `Drop` impls).
static AVAILABLE: AtomicBool = AtomicBool::new(true);
/// An empty type which prevents the use of this library from multiple threads simultaneously.
#[derive(Debug)]
pub struct Clang;
impl Clang {
    //- Constructors -----------------------------

    /// Constructs a new `Clang`.
    ///
    /// Only one instance of `Clang` is allowed at a time.
    ///
    /// # Failures
    ///
    /// * an instance of `Clang` already exists
    /// * a `libclang` shared library could not be found
    /// * a `libclang` shared library symbol could not be loaded
    #[cfg(feature="runtime")]
    pub fn new() -> Result<Clang, String> {
        // Atomically claim the singleton slot; only the winner proceeds to
        // load the `libclang` shared library.
        if !AVAILABLE.swap(false, atomic::Ordering::SeqCst) {
            return Err("an instance of `Clang` already exists".into());
        }
        load().map(|_| Clang)
    }

    /// Constructs a new `Clang`.
    ///
    /// Only one instance of `Clang` is allowed at a time.
    ///
    /// # Failures
    ///
    /// * an instance of `Clang` already exists
    #[cfg(not(feature="runtime"))]
    pub fn new() -> Result<Clang, String> {
        // Atomically claim the singleton slot; with `libclang` linked at
        // compile time there is nothing further to load.
        if !AVAILABLE.swap(false, atomic::Ordering::SeqCst) {
            return Err("an instance of `Clang` already exists".into());
        }
        Ok(Clang)
    }
}
#[cfg(feature="runtime")]
impl Drop for Clang {
    fn drop(&mut self) {
        // Unload the dynamically loaded `libclang` first, then release the
        // singleton slot so a subsequent `Clang::new` can load it again.
        unload().unwrap();
        AVAILABLE.store(true, atomic::Ordering::SeqCst);
    }
}
#[cfg(not(feature="runtime"))]
impl Drop for Clang {
    fn drop(&mut self) {
        // Release the singleton slot so another `Clang` may be constructed.
        AVAILABLE.store(true, atomic::Ordering::SeqCst);
    }
}
// CompilationDatabase ________________________________________
/// A compilation database of all information used to compile files in a project.
#[derive(Debug)]
pub struct CompilationDatabase {
    // Owned `libclang` handle; released in `Drop`.
    ptr: CXCompilationDatabase,
}
impl CompilationDatabase {
/// Creates a compilation database from the database found in the given directory.
pub fn from_directory<P: AsRef<Path>>(path: P) -> Result<CompilationDatabase, ()> {
let path = utility::from_path(path);
unsafe {
let mut error: CXCompilationDatabase_Error = mem::uninitialized();
let ptr = clang_CompilationDatabase_fromDirectory(path.as_ptr(), &mut error);
match error {
CXCompilationDatabase_NoError => Ok(CompilationDatabase { ptr }),
CXCompilationDatabase_CanNotLoadDatabase => Err(()),
_ => unreachable!(),
}
}
}
/// Get all the compile commands from the database.
pub fn get_all_compile_commands(&self) -> CompileCommands {
unsafe {
CompileCommands::from_ptr(clang_CompilationDatabase_getAllCompileCommands(self.ptr))
}
}
/// Find the compile commands for the given file.
pub fn get_compile_commands<P: AsRef<Path>>(&self, path: P) -> Result<CompileCommands, ()> {
// Presumably this returns null if we can't find the given path?
// The Clang docs don't specify.
let path = utility::from_path(path);
let ptr = unsafe { clang_CompilationDatabase_getCompileCommands(self.ptr, path.as_ptr()) };
ptr.map(CompileCommands::from_ptr).ok_or(())
}
}
impl Drop for CompilationDatabase {
    fn drop(&mut self) {
        // Release the `libclang` database handle acquired in `from_directory`.
        unsafe {
            clang_CompilationDatabase_dispose(self.ptr);
        }
    }
}
/// The result of a search in a CompilationDatabase.
#[derive(Debug)]
pub struct CompileCommands {
    // Owned `libclang` handle; released in `Drop`.
    ptr: CXCompileCommands,
}
impl CompileCommands {
    // Wraps a raw handle, asserting it is non-null (callers are expected to
    // have already handled the null case).
    fn from_ptr(ptr: CXCompileCommands) -> CompileCommands {
        assert!(!ptr.is_null());
        CompileCommands { ptr }
    }

    /// Returns all commands for this search.
    pub fn get_commands(&self) -> Vec<CompileCommand> {
        // `iter!` pairs a size getter with an indexed getter to iterate over
        // the `libclang` collection.
        iter!(
            clang_CompileCommands_getSize(self.ptr),
            clang_CompileCommands_getCommand(self.ptr),
        )
        .map(|p| CompileCommand::from_ptr(self, p))
        .collect()
    }
}
impl Drop for CompileCommands {
    fn drop(&mut self) {
        // Release the `libclang` handle; outstanding `CompileCommand`s borrow
        // from `self`, so the borrow checker prevents use after this point.
        unsafe {
            clang_CompileCommands_dispose(self.ptr);
        }
    }
}
/// A compile command from CompilationDatabase.
#[derive(Debug, Copy, Clone)]
pub struct CompileCommand<'cmds> {
    // Borrowed from the owning `CompileCommands`; the marker ties this
    // command's lifetime to that collection so it cannot outlive it.
    ptr: CXCompileCommand,
    _marker: PhantomData<&'cmds CompileCommands>,
}
impl<'cmds> CompileCommand<'cmds> {
    // Wraps a raw handle, borrowing the parent collection only to capture its
    // lifetime.
    fn from_ptr(_: &'cmds CompileCommands, ptr: CXCompileCommand) -> CompileCommand<'cmds> {
        assert!(!ptr.is_null());
        CompileCommand {
            ptr,
            _marker: PhantomData,
        }
    }

    /// Get the working directory where the command was executed.
    pub fn get_directory(&self) -> PathBuf {
        utility::to_path(unsafe { clang_CompileCommand_getDirectory(self.ptr) })
    }

    /// Get the filename associated with the command.
    #[cfg(feature="gte_clang_3_8")]
    pub fn get_filename(&self) -> PathBuf {
        utility::to_path(unsafe { clang_CompileCommand_getFilename(self.ptr) })
    }

    /// Get all arguments passed to the command.
    pub fn get_arguments(&self) -> Vec<String> {
        iter!(
            clang_CompileCommand_getNumArgs(self.ptr),
            clang_CompileCommand_getArg(self.ptr),
        )
        .map(utility::to_string)
        .collect()
    }

    // TODO: Args, mapped source path, mapped source content.
}
// Entity ________________________________________
/// An AST entity.
#[derive(Copy, Clone)]
pub struct Entity<'tu> {
    // Raw cursor value (not a heap allocation, hence `Copy`).
    raw: CXCursor,
    // The translation unit this cursor belongs to; bounds the lifetime of
    // everything derived from this entity.
    tu: &'tu TranslationUnit<'tu>,
}
impl<'tu> Entity<'tu> {
//- Constructors -----------------------------
    // Wraps a raw cursor together with its owning translation unit.
    fn from_raw(raw: CXCursor, tu: &'tu TranslationUnit<'tu>) -> Entity<'tu> {
        Entity { raw, tu }
    }
//- Accessors --------------------------------
/// Evaluates this AST entity, if possible.
#[cfg(feature="gte_clang_3_9")]
pub fn evaluate(&self) -> Option<EvaluationResult> {
macro_rules! string {
($eval:expr) => {
std::ffi::CStr::from_ptr(clang_EvalResult_getAsStr($eval)).to_owned()
};
}
#[cfg(feature="gte_clang_4_0")]
unsafe fn evaluate_integer(e: CXEvalResult) -> EvaluationResult {
if clang_EvalResult_isUnsignedInt(e) != 0 {
EvaluationResult::UnsignedInteger(clang_EvalResult_getAsUnsigned(e) as u64)
} else {
EvaluationResult::SignedInteger(clang_EvalResult_getAsLongLong(e) as i64)
}
}
#[cfg(not(feature="gte_clang_4_0"))]
unsafe fn evaluate_integer(e: CXEvalResult) -> EvaluationResult {
EvaluationResult::SignedInteger(clang_EvalResult_getAsInt(e) as i64)
}
unsafe {
clang_Cursor_Evaluate(self.raw).map(|e| {
assert!(!e.is_null());
let result = match clang_EvalResult_getKind(e) {
CXEval_UnExposed => EvaluationResult::Unexposed,
CXEval_Int => evaluate_integer(e),
CXEval_Float => EvaluationResult::Float(clang_EvalResult_getAsDouble(e) as f64),
CXEval_ObjCStrLiteral => EvaluationResult::ObjCString(string!(e)),
CXEval_StrLiteral => EvaluationResult::String(string!(e)),
CXEval_CFStr => EvaluationResult::CFString(string!(e)),
CXEval_Other => EvaluationResult::Other(string!(e)),
_ => panic!("unexpected eval result: {:?}", e),
};
clang_EvalResult_dispose(e);
result
})
}
}
    /// Returns the categorization of this AST entity.
    pub fn get_kind(&self) -> EntityKind {
        // Relies on `EntityKind` discriminants matching the `CXCursorKind`
        // values returned by `libclang`.
        unsafe { mem::transmute(clang_getCursorKind(self.raw)) }
    }

    /// Returns the display name of this AST entity, if any.
    ///
    /// The display name of an entity contains additional information that helps identify the
    /// entity.
    pub fn get_display_name(&self) -> Option<String> {
        unsafe { utility::to_string_option(clang_getCursorDisplayName(self.raw)) }
    }

    #[cfg(feature="gte_clang_7_0")]
    /// Returns the pretty printer for this declaration.
    pub fn get_pretty_printer(&self) -> PrettyPrinter {
        unsafe { PrettyPrinter::from_raw(clang_getCursorPrintingPolicy(self.raw), self) }
    }

    /// Returns the source location of this AST entity, if any.
    pub fn get_location(&self) -> Option<SourceLocation<'tu>> {
        unsafe { clang_getCursorLocation(self.raw).map(|l| SourceLocation::from_raw(l, self.tu)) }
    }

    /// Returns the source range of this AST entity, if any.
    pub fn get_range(&self) -> Option<SourceRange<'tu>> {
        unsafe { clang_getCursorExtent(self.raw).map(|r| SourceRange::from_raw(r, self.tu)) }
    }

    /// Returns the accessibility of this declaration or base class specifier, if applicable.
    pub fn get_accessibility(&self) -> Option<Accessibility> {
        unsafe {
            match clang_getCXXAccessSpecifier(self.raw) {
                // The invalid specifier marks entities without C++ access control.
                CX_CXXInvalidAccessSpecifier => None,
                other => Some(mem::transmute(other)),
            }
        }
    }
    /// Returns the arguments of this function or method, if applicable.
    pub fn get_arguments(&self) -> Option<Vec<Entity<'tu>>> {
        // `iter_option!` yields `None` when the count getter reports a
        // negative value (i.e., the entity has no argument list).
        iter_option!(
            clang_Cursor_getNumArguments(self.raw),
            clang_Cursor_getArgument(self.raw),
        ).map(|i| i.map(|a| Entity::from_raw(a, self.tu)).collect())
    }

    /// Returns the availability of this AST entity.
    pub fn get_availability(&self) -> Availability {
        unsafe { mem::transmute(clang_getCursorAvailability(self.raw)) }
    }

    /// Returns the width of this bit field, if applicable.
    pub fn get_bit_field_width(&self) -> Option<usize> {
        unsafe {
            // A negative width indicates the entity is not a bit field.
            let width = clang_getFieldDeclBitWidth(self.raw);
            if width >= 0 {
                Some(width as usize)
            } else {
                None
            }
        }
    }
    /// Returns the canonical entity for this AST entity.
    ///
    /// In the C family of languages, some types of entities can be declared multiple times. When
    /// there are multiple declarations of the same entity, only one will be considered canonical.
    pub fn get_canonical_entity(&self) -> Entity<'tu> {
        unsafe { Entity::from_raw(clang_getCanonicalCursor(self.raw), self.tu) }
    }

    /// Returns the comment associated with this AST entity, if any.
    pub fn get_comment(&self) -> Option<String> {
        unsafe { utility::to_string_option(clang_Cursor_getRawCommentText(self.raw)) }
    }

    /// Returns the parsed comment associated with this declaration, if applicable.
    pub fn get_parsed_comment(&self) -> Option<Comment<'tu>> {
        unsafe { clang_Cursor_getParsedComment(self.raw).map(Comment::from_raw) }
    }

    /// Returns the brief of the comment associated with this AST entity, if any.
    pub fn get_comment_brief(&self) -> Option<String> {
        unsafe { utility::to_string_option(clang_Cursor_getBriefCommentText(self.raw)) }
    }

    /// Returns the source range of the comment associated with this AST entity, if any.
    pub fn get_comment_range(&self) -> Option<SourceRange<'tu>> {
        unsafe { clang_Cursor_getCommentRange(self.raw).map(|r| SourceRange::from_raw(r, self.tu)) }
    }

    /// Returns a completion string for this declaration or macro definition, if applicable.
    pub fn get_completion_string(&self) -> Option<CompletionString> {
        unsafe { clang_getCursorCompletionString(self.raw).map(CompletionString::from_ptr) }
    }
/// Returns the child of this AST entity with the supplied index.
pub fn get_child(&self, mut index: usize) -> Option<Entity<'tu>> {
let mut child = None;
self.visit_children(|c, _| {
if index == 0 {
child = Some(c);
EntityVisitResult::Break
} else {
index -= 1;
EntityVisitResult::Continue
}
});
child
}
/// Returns the children of this AST entity.
pub fn get_children(&self) -> Vec<Entity<'tu>> {
let mut children = vec![];
self.visit_children(|c, _| {
children.push(c);
EntityVisitResult::Continue
});
children
}
    /// Returns the AST entity that describes the definition of this AST entity, if any.
    pub fn get_definition(&self) -> Option<Entity<'tu>> {
        unsafe { clang_getCursorDefinition(self.raw).map(|p| Entity::from_raw(p, self.tu)) }
    }

    /// Returns the value of this enum constant declaration, if applicable.
    ///
    /// The value is returned as both its signed and unsigned interpretations.
    pub fn get_enum_constant_value(&self) -> Option<(i64, u64)> {
        unsafe {
            // The kind check is required because the FFI getters return
            // unspecified values for non-enum-constant cursors.
            if self.get_kind() == EntityKind::EnumConstantDecl {
                let signed = clang_getEnumConstantDeclValue(self.raw);
                let unsigned = clang_getEnumConstantDeclUnsignedValue(self.raw);
                Some((signed, unsigned))
            } else {
                None
            }
        }
    }

    /// Returns the underlying type of this enum declaration, if applicable.
    pub fn get_enum_underlying_type(&self) -> Option<Type<'tu>> {
        unsafe { clang_getEnumDeclIntegerType(self.raw).map(|t| Type::from_raw(t, self.tu)) }
    }
/// Returns the exception specification of this AST entity, if applicable.
#[cfg(feature="gte_clang_5_0")]
pub fn get_exception_specification(&self) -> Option<ExceptionSpecification> {
unsafe {
match clang_getCursorExceptionSpecificationType(self.raw) {
-1 | CXCursor_ExceptionSpecificationKind_None => None,
other => Some(mem::transmute(other)),
}
}
}
/// Returns the `external_source_symbol` attribute attached to this AST entity, if any.
#[cfg(feature="gte_clang_5_0")]
pub fn get_external_symbol(&self) -> Option<ExternalSymbol> {
unsafe {
let mut language: CXString = mem::uninitialized();
let mut defined: CXString = mem::uninitialized();
let mut generated: c_uint = 0;
if clang_Cursor_isExternalSymbol(self.raw, &mut language, &mut defined, &mut generated) != 0 {
Some(ExternalSymbol {
language: utility::to_string(language),
defined: utility::to_string(defined),
generated: generated != 0
})
} else {
None
}
}
}
    /// Returns the file included by this inclusion directive, if applicable.
    pub fn get_file(&self) -> Option<File<'tu>> {
        unsafe { clang_getIncludedFile(self.raw).map(|f| File::from_ptr(f, self.tu)) }
    }

    /// Returns the language used by this declaration, if applicable.
    pub fn get_language(&self) -> Option<Language> {
        unsafe {
            match clang_getCursorLanguage(self.raw) {
                // The invalid value marks cursors that are not declarations.
                CXLanguage_Invalid => None,
                other => Some(mem::transmute(other)),
            }
        }
    }

    /// Returns the lexical parent of this AST entity, if any.
    pub fn get_lexical_parent(&self) -> Option<Entity<'tu>> {
        unsafe { clang_getCursorLexicalParent(self.raw).map(|p| Entity::from_raw(p, self.tu)) }
    }

    /// Returns the linkage of this AST entity, if any.
    pub fn get_linkage(&self) -> Option<Linkage> {
        unsafe {
            match clang_getCursorLinkage(self.raw) {
                CXLinkage_Invalid => None,
                other => Some(mem::transmute(other)),
            }
        }
    }
    /// Returns the mangled name of this AST entity, if any.
    #[cfg(feature="gte_clang_3_6")]
    pub fn get_mangled_name(&self) -> Option<String> {
        unsafe { utility::to_string_option(clang_Cursor_getMangling(self.raw)) }
    }

    /// Returns the mangled names of this C++ constructor or destructor, if applicable.
    #[cfg(feature="gte_clang_3_8")]
    pub fn get_mangled_names(&self) -> Option<Vec<String>> {
        unsafe { utility::to_string_set_option(clang_Cursor_getCXXManglings(self.raw)) }
    }

    /// Returns the mangled names of this Objective-C class interface or implementation, if applicable.
    #[cfg(feature="gte_clang_6_0")]
    pub fn get_mangled_objc_names(&self) -> Option<Vec<String>> {
        unsafe { utility::to_string_set_option(clang_Cursor_getObjCManglings(self.raw)) }
    }

    /// Returns the module imported by this module import declaration, if applicable.
    pub fn get_module(&self) -> Option<Module<'tu>> {
        unsafe { clang_Cursor_getModule(self.raw).map(|m| Module::from_ptr(m, self.tu)) }
    }
    /// Returns the name of this AST entity, if any.
    pub fn get_name(&self) -> Option<String> {
        unsafe { utility::to_string_option(clang_getCursorSpelling(self.raw)) }
    }

    /// Returns the source ranges of the name of this AST entity.
    pub fn get_name_ranges(&self) -> Vec<SourceRange<'tu>> {
        unsafe {
            // Query spelling name ranges by increasing index until `libclang`
            // reports either a null range or a range whose start has no file
            // (the range points outside any source file), which terminates
            // the iteration.
            (0..).map(|i| clang_Cursor_getSpellingNameRange(self.raw, i, 0)).take_while(|r| {
                if clang_Range_isNull(*r) != 0 {
                    false
                } else {
                    let range = clang_getRangeStart(*r);
                    let mut file = ptr::null_mut();
                    // Null out-pointers: line/column/offset are not needed.
                    let null = ptr::null_mut();
                    clang_getSpellingLocation(range, &mut file, null, null, null);
                    !file.is_null()
                }
            }).map(|r| SourceRange::from_raw(r, self.tu)).collect()
        }
    }
    /// Returns which attributes were applied to this Objective-C property, if applicable.
    pub fn get_objc_attributes(&self) -> Option<ObjCAttributes> {
        // The second argument is reserved by `libclang` — TODO confirm; zero
        // is passed here as in the upstream examples.
        let attributes = unsafe { clang_Cursor_getObjCPropertyAttributes(self.raw, 0) };
        // A zero bitmask means no attributes (or not a property).
        if attributes != 0 {
            Some(ObjCAttributes::from(attributes))
        } else {
            None
        }
    }

    /// Returns the name of the method implementing the getter for this Objective-C property, if applicable.
    #[cfg(feature="gte_clang_8_0")]
    pub fn get_objc_getter_name(&self) -> Option<String> {
        utility::to_string_option(unsafe { clang_Cursor_getObjCPropertyGetterName(self.raw) })
    }

    /// Returns the element type for this Objective-C `iboutletcollection` attribute, if applicable.
    pub fn get_objc_ib_outlet_collection_type(&self) -> Option<Type<'tu>> {
        unsafe { clang_getIBOutletCollectionType(self.raw).map(|t| Type::from_raw(t, self.tu)) }
    }

    /// Returns the type of the receiver of this Objective-C message, if applicable.
    pub fn get_objc_receiver_type(&self) -> Option<Type<'tu>> {
        unsafe { clang_Cursor_getReceiverType(self.raw).map(|t| Type::from_raw(t, self.tu)) }
    }

    /// Returns the selector index for this Objective-C selector identifier, if applicable.
    pub fn get_objc_selector_index(&self) -> Option<usize> {
        // A negative index indicates the cursor is not a selector identifier.
        let index = unsafe { clang_Cursor_getObjCSelectorIndex(self.raw) };
        if index >= 0 {
            Some(index as usize)
        } else {
            None
        }
    }

    /// Returns the name of the method implementing the setter for this Objective-C property, if applicable.
    #[cfg(feature="gte_clang_8_0")]
    pub fn get_objc_setter_name(&self) -> Option<String> {
        utility::to_string_option(unsafe { clang_Cursor_getObjCPropertySetterName(self.raw) })
    }

    /// Returns the type encoding for this Objective-C declaration, if applicable.
    pub fn get_objc_type_encoding(&self) -> Option<String> {
        unsafe { utility::to_string_option(clang_getDeclObjCTypeEncoding(self.raw)) }
    }
    /// Returns which qualifiers were applied to this Objective-C method return or parameter type,
    /// if applicable.
    pub fn get_objc_qualifiers(&self) -> Option<ObjCQualifiers> {
        // A zero bitmask means no qualifiers (or not applicable).
        let qualifiers = unsafe { clang_Cursor_getObjCDeclQualifiers(self.raw) };
        if qualifiers != 0 {
            Some(ObjCQualifiers::from(qualifiers))
        } else {
            None
        }
    }

    /// Returns the offset of this field, if applicable.
    #[cfg(feature="gte_clang_3_7")]
    pub fn get_offset_of_field(&self) -> Result<usize, OffsetofError> {
        // Negative values encode errors; `OffsetofError::from_error` maps
        // them, otherwise the value is the offset in bits.
        let offsetof_ = unsafe { clang_Cursor_getOffsetOfField(self.raw) };
        OffsetofError::from_error(offsetof_).map(|_| offsetof_ as usize)
    }
    /// Returns the overloaded declarations referenced by this overloaded declaration reference, if
    /// applicable.
    pub fn get_overloaded_declarations(&self) -> Option<Vec<Entity<'tu>>> {
        let declarations = iter!(
            clang_getNumOverloadedDecls(self.raw),
            clang_getOverloadedDecl(self.raw),
        ).map(|e| Entity::from_raw(e, self.tu)).collect::<Vec<_>>();
        // An empty list means the cursor is not an overloaded declaration
        // reference, so it is reported as `None` rather than `Some(vec![])`.
        if !declarations.is_empty() {
            Some(declarations)
        } else {
            None
        }
    }

    /// Returns the methods that were overridden by this method, if applicable.
    pub fn get_overridden_methods(&self) -> Option<Vec<Entity<'tu>>> {
        unsafe {
            let (mut raw, mut count) = (ptr::null_mut(), 0);
            clang_getOverriddenCursors(self.raw, &mut raw, &mut count);
            if !raw.is_null() {
                // Copy the cursors out of the `libclang`-owned buffer before
                // disposing of it.
                let raws = slice::from_raw_parts(raw, count as usize);
                let methods = raws.iter().map(|e| Entity::from_raw(*e, self.tu)).collect();
                clang_disposeOverriddenCursors(raw);
                Some(methods)
            } else {
                None
            }
        }
    }
    /// Returns the availability of this declaration on the platforms where it is known, if
    /// applicable.
    pub fn get_platform_availability(&self) -> Option<Vec<PlatformAvailability>> {
        if !self.is_declaration() {
            return None;
        }

        unsafe {
            // A fixed-size stack buffer is used; availability entries beyond
            // 32 platforms would be truncated (libclang writes at most
            // `buffer.len()` entries even if `count` is larger).
            let mut buffer: [CXPlatformAvailability; 32] = [CXPlatformAvailability::default(); 32];
            let count = clang_getCursorPlatformAvailability(
                self.raw,
                ptr::null_mut(),
                ptr::null_mut(),
                ptr::null_mut(),
                ptr::null_mut(),
                (&mut buffer).as_mut_ptr(),
                buffer.len() as c_int,
            );
            Some((0..count as usize).map(|i| PlatformAvailability::from_raw(buffer[i])).collect())
        }
    }
    /// Returns the AST entity referred to by this AST entity, if any.
    pub fn get_reference(&self) -> Option<Entity<'tu>> {
        unsafe { clang_getCursorReferenced(self.raw).map(|p| Entity::from_raw(p, self.tu)) }
    }

    /// Returns the semantic parent of this AST entity, if any.
    pub fn get_semantic_parent(&self) -> Option<Entity<'tu>> {
        let parent = unsafe { clang_getCursorSemanticParent(self.raw) };
        parent.map(|p| Entity::from_raw(p, self.tu))
    }

    /// Returns the storage class of this declaration, if applicable.
    #[cfg(feature="gte_clang_3_6")]
    pub fn get_storage_class(&self) -> Option<StorageClass> {
        unsafe {
            match clang_Cursor_getStorageClass(self.raw) {
                // The invalid value marks cursors that are not declarations.
                CX_SC_Invalid => None,
                other => Some(mem::transmute(other)),
            }
        }
    }

    /// Returns the template declaration this template specialization was instantiated from, if
    /// applicable.
    pub fn get_template(&self) -> Option<Entity<'tu>> {
        let parent = unsafe { clang_getSpecializedCursorTemplate(self.raw) };
        parent.map(|p| Entity::from_raw(p, self.tu))
    }
/// Returns the template arguments for this template function specialization, if applicable.
#[cfg(feature="gte_clang_3_6")]
pub fn get_template_arguments(&self) -> Option<Vec<TemplateArgument<'tu>>> {
let get_type = &clang_Cursor_getTemplateArgumentType;
let get_signed = &clang_Cursor_getTemplateArgumentValue;
let get_unsigned = &clang_Cursor_getTemplateArgumentUnsignedValue;
iter_option!(
clang_Cursor_getNumTemplateArguments(self.raw),
clang_Cursor_getTemplateArgumentKind(self.raw),
).map(|i| {
i.enumerate().map(|(i, t)| {
match t {
CXTemplateArgumentKind_Null => TemplateArgument::Null,
CXTemplateArgumentKind_Type => {
let type_ = unsafe { get_type(self.raw, i as c_uint) };
TemplateArgument::Type(Type::from_raw(type_, self.tu))
},
CXTemplateArgumentKind_Declaration => TemplateArgument::Declaration,
CXTemplateArgumentKind_NullPtr => TemplateArgument::Nullptr,
CXTemplateArgumentKind_Integral => {
let signed = unsafe { get_signed(self.raw, i as c_uint) };
let unsigned = unsafe { get_unsigned(self.raw, i as c_uint) };
TemplateArgument::Integral(signed as i64, unsigned as u64)
},
CXTemplateArgumentKind_Template => TemplateArgument::Template,
CXTemplateArgumentKind_TemplateExpansion => TemplateArgument::TemplateExpansion,
CXTemplateArgumentKind_Expression => TemplateArgument::Expression,
CXTemplateArgumentKind_Pack => TemplateArgument::Pack,
_ => unreachable!(),
}
}).collect()
})
}
/// Returns the categorization of the template specialization that would result from
/// instantiating this template declaration, if applicable.
pub fn get_template_kind(&self) -> Option<EntityKind> {
unsafe {
match clang_getTemplateCursorKind(self.raw) {
CXCursor_NoDeclFound => None,
other => Some(mem::transmute(other)),
}
}
}
/// Returns the thread-local storage (TLS) kind of this declaration, if applicable.
#[cfg(feature="gte_clang_6_0")]
pub fn get_tls_kind(&self) -> Option<TlsKind> {
unsafe {
match clang_getCursorTLSKind(self.raw) {
CXTLS_None => None,
other => Some(mem::transmute(other)),
}
}
}
    /// Returns the translation unit which contains this AST entity.
    pub fn get_translation_unit(&self) -> &'tu TranslationUnit<'tu> {
        // Entities borrow their translation unit, so this is just the stored reference.
        self.tu
    }
/// Returns the type of this AST entity, if any.
pub fn get_type(&self) -> Option<Type<'tu>> {
unsafe { clang_getCursorType(self.raw).map(|t| Type::from_raw(t, self.tu)) }
}
/// Returns the underlying type of this typedef declaration, if applicable.
pub fn get_typedef_underlying_type(&self) -> Option<Type<'tu>> {
unsafe { clang_getTypedefDeclUnderlyingType(self.raw).map(|t| Type::from_raw(t, self.tu)) }
}
/// Returns the USR for this AST entity, if any.
pub fn get_usr(&self) -> Option<Usr> {
unsafe { utility::to_string_option(clang_getCursorUSR(self.raw)).map(Usr) }
}
/// Returns the linker visibility for this AST entity, if any.
#[cfg(feature="gte_clang_3_8")]
pub fn get_visibility(&self) -> Option<Visibility> {
unsafe {
match clang_getCursorVisibility(self.raw) {
CXVisibility_Invalid => None,
other => Some(mem::transmute(other)),
}
}
}
/// Returns the result type of this AST entity, if applicable.
pub fn get_result_type(&self) -> Option<Type<'tu>> {
unsafe { clang_getCursorResultType(self.raw).map(|t| Type::from_raw(t, self.tu)) }
}
/// Returns whether this AST entity has any attached attributes.
#[cfg(feature="gte_clang_3_9")]
pub fn has_attributes(&self) -> bool {
unsafe { clang_Cursor_hasAttrs(self.raw) != 0 }
}
/// Returns whether this AST entity is an abstract C++ record.
#[cfg(feature="gte_clang_6_0")]
pub fn is_abstract_record(&self) -> bool {
unsafe { clang_CXXRecord_isAbstract(self.raw) != 0 }
}
/// Returns whether this AST entity is an anonymous record declaration.
#[cfg(feature="gte_clang_3_7")]
pub fn is_anonymous(&self) -> bool {
unsafe { clang_Cursor_isAnonymous(self.raw) != 0 }
}
/// Returns whether this AST entity is a bit field.
pub fn is_bit_field(&self) -> bool {
unsafe { clang_Cursor_isBitField(self.raw) != 0 }
}
/// Returns whether this AST entity is a builtin macro.
#[cfg(feature="gte_clang_3_9")]
pub fn is_builtin_macro(&self) -> bool {
unsafe { clang_Cursor_isMacroBuiltin(self.raw) != 0 }
}
/// Returns whether this AST entity is a const method.
pub fn is_const_method(&self) -> bool {
unsafe { clang_CXXMethod_isConst(self.raw) != 0 }
}
/// Returns whether this AST entity is a C++ converting constructor.
#[cfg(feature="gte_clang_3_9")]
pub fn is_converting_constructor(&self) -> bool {
unsafe { clang_CXXConstructor_isConvertingConstructor(self.raw) != 0 }
}
/// Returns whether this AST entity is a C++ copy constructor.
#[cfg(feature="gte_clang_3_9")]
pub fn is_copy_constructor(&self) -> bool {
unsafe { clang_CXXConstructor_isCopyConstructor(self.raw) != 0 }
}
/// Returns whether this AST entity is a C++ default constructor.
#[cfg(feature="gte_clang_3_9")]
pub fn is_default_constructor(&self) -> bool {
unsafe { clang_CXXConstructor_isDefaultConstructor(self.raw) != 0 }
}
/// Returns whether this AST entity is a C++ defaulted constructor or method.
#[cfg(feature="gte_clang_3_9")]
pub fn is_defaulted(&self) -> bool {
unsafe { clang_CXXMethod_isDefaulted(self.raw) != 0 }
}
/// Returns whether this AST entity is a declaration and also the definition of that
/// declaration.
pub fn is_definition(&self) -> bool {
unsafe { clang_isCursorDefinition(self.raw) != 0 }
}
/// Returns whether this AST entity is a dynamic call.
///
/// A dynamic call is either a call to a C++ virtual method or an Objective-C message where the
/// receiver is an object instance, not `super` or a specific class.
pub fn is_dynamic_call(&self) -> bool {
unsafe { clang_Cursor_isDynamicCall(self.raw) != 0 }
}
/// Returns whether this AST entity is a function-like macro.
#[cfg(feature="gte_clang_3_9")]
pub fn is_function_like_macro(&self) -> bool {
unsafe { clang_Cursor_isMacroFunctionLike(self.raw) != 0 }
}
/// Returns whether this AST entity is an inline function.
#[cfg(feature="gte_clang_3_9")]
pub fn is_inline_function(&self) -> bool {
unsafe { clang_Cursor_isFunctionInlined(self.raw) != 0 }
}
/// Returns whether this AST entity is an invalid declaration.
#[cfg(feature="gte_clang_7_0")]
pub fn is_invalid_declaration(&self) -> bool {
unsafe { clang_isInvalidDeclaration(self.raw) != 0 }
}
/// Returns whether this AST entity is a C++ default constructor.
#[cfg(feature="gte_clang_3_9")]
pub fn is_move_constructor(&self) -> bool {
unsafe { clang_CXXConstructor_isMoveConstructor(self.raw) != 0 }
}
#[cfg(feature="gte_clang_3_8")]
/// Returns whether this AST entity is a mutable field in a C++ struct or class.
pub fn is_mutable(&self) -> bool {
unsafe { clang_CXXField_isMutable(self.raw) != 0 }
}
/// Returns whether this AST entity is an Objective-C method or property declaration with the
/// `@optional` attribute applied to it.
pub fn is_objc_optional(&self) -> bool {
unsafe { clang_Cursor_isObjCOptional(self.raw) != 0 }
}
/// Returns whether this AST entity is a pure virtual method.
pub fn is_pure_virtual_method(&self) -> bool {
unsafe { clang_CXXMethod_isPureVirtual(self.raw) != 0 }
}
/// Returns whether this AST entity is a scoped enum.
#[cfg(feature="gte_clang_5_0")]
pub fn is_scoped(&self) -> bool {
unsafe { clang_EnumDecl_isScoped(self.raw) != 0 }
}
/// Returns whether this AST entity is a static method.
pub fn is_static_method(&self) -> bool {
unsafe { clang_CXXMethod_isStatic(self.raw) != 0 }
}
/// Returns whether this AST entity is a variadic function or method.
pub fn is_variadic(&self) -> bool {
unsafe { clang_Cursor_isVariadic(self.raw) != 0 }
}
/// Returns whether this AST entity is a virtual base class specifier.
pub fn is_virtual_base(&self) -> bool {
unsafe { clang_isVirtualBase(self.raw) != 0 }
}
/// Returns whether this AST entity is a virtual method.
pub fn is_virtual_method(&self) -> bool {
unsafe { clang_CXXMethod_isVirtual(self.raw) != 0 }
}
    /// Visits the children of this AST entity recursively and returns whether visitation was ended
    /// by the callback returning `EntityVisitResult::Break`.
    ///
    /// The first argument of the callback is the AST entity being visited and the second argument
    /// is the parent of that AST entity. The return value of the callback determines how visitation
    /// will proceed.
    pub fn visit_children<F: FnMut(Entity<'tu>, Entity<'tu>) -> EntityVisitResult>(
        &self, f: F
    ) -> bool {
        // Object-safe adapter trait so the generic closure can be erased into a
        // trait object and smuggled through the C callback's `void*` argument.
        trait EntityCallback<'tu> {
            fn call(&mut self, entity: Entity<'tu>, parent: Entity<'tu>) -> EntityVisitResult;
        }
        impl<'tu, F: FnMut(Entity<'tu>, Entity<'tu>) -> EntityVisitResult>
        EntityCallback<'tu> for F {
            fn call(&mut self, entity: Entity<'tu>, parent: Entity<'tu>) -> EntityVisitResult {
                self(entity, parent)
            }
        }
        // C trampoline invoked by libclang once per child; `data` carries the
        // translation unit reference and the boxed user callback.
        extern fn visit(
            cursor: CXCursor, parent: CXCursor, data: CXClientData
        ) -> CXChildVisitResult {
            unsafe {
                let &mut (tu, ref mut callback) =
                    &mut *(data as *mut (&TranslationUnit, Box<dyn EntityCallback>));
                let entity = Entity::from_raw(cursor, tu);
                let parent = Entity::from_raw(parent, tu);
                // NOTE(review): assumes `EntityVisitResult` is layout-compatible
                // with `CXChildVisitResult` — confirm against the enum definition.
                mem::transmute(callback.call(entity, parent))
            }
        }
        // `data` must outlive the call below; libclang only borrows it.
        let mut data = (self.tu, Box::new(f) as Box<dyn EntityCallback>);
        unsafe { clang_visitChildren(self.raw, visit, utility::addressof(&mut data)) != 0 }
    }
//- Categorization ---------------------------
/// Returns whether this AST entity is categorized as an attribute.
pub fn is_attribute(&self) -> bool {
unsafe { clang_isAttribute(self.raw.kind) != 0 }
}
/// Returns whether this AST entity is categorized as a declaration.
pub fn is_declaration(&self) -> bool {
unsafe { clang_isDeclaration(self.raw.kind) != 0 }
}
/// Returns whether this AST entity is categorized as an expression.
pub fn is_expression(&self) -> bool {
unsafe { clang_isExpression(self.raw.kind) != 0 }
}
/// Returns whether this AST entity is categorized as a preprocessing entity.
pub fn is_preprocessing(&self) -> bool {
unsafe { clang_isPreprocessing(self.raw.kind) != 0 }
}
/// Returns whether this AST entity is categorized as a reference.
pub fn is_reference(&self) -> bool {
unsafe { clang_isReference(self.raw.kind) != 0 }
}
/// Returns whether this AST entity is categorized as a statement.
pub fn is_statement(&self) -> bool {
unsafe { clang_isStatement(self.raw.kind) != 0 }
}
/// Returns whether the categorization of this AST entity is unexposed.
pub fn is_unexposed(&self) -> bool {
unsafe { clang_isUnexposed(self.raw.kind) != 0 }
}
//- Location ---------------------------------
/// Returns whether this AST entity is in a main file.
pub fn is_in_main_file(&self) -> bool {
self.get_range().map_or(false, |r| r.is_in_main_file())
}
/// Returns whether this AST entity is in a system header.
pub fn is_in_system_header(&self) -> bool {
self.get_range().map_or(false, |r| r.is_in_system_header())
}
}
impl<'tu> fmt::Debug for Entity<'tu> {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        // Summarize the entity by its kind, display name, and source location.
        let mut debug = formatter.debug_struct("Entity");
        debug.field("kind", &self.get_kind());
        debug.field("display_name", &self.get_display_name());
        debug.field("location", &self.get_location());
        debug.finish()
    }
}
impl<'tu> cmp::PartialEq for Entity<'tu> {
    // Equality is delegated to libclang's cursor comparison.
    fn eq(&self, other: &Entity<'tu>) -> bool {
        let equal = unsafe { clang_equalCursors(self.raw, other.raw) };
        equal != 0
    }
}
// `clang_equalCursors` is assumed to be an equivalence relation, making `Eq` sound.
impl<'tu> cmp::Eq for Entity<'tu> { }
impl<'tu> hash::Hash for Entity<'tu> {
    /// Hashes this entity using libclang's cursor hash so that entities which
    /// compare equal via `clang_equalCursors` hash identically.
    fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
        let integer = unsafe { clang_hashCursor(self.raw) };
        // Feed the hash value's native-endian bytes to the hasher. This matches
        // the previous behavior (which built a raw byte slice over the integer
        // with `slice::from_raw_parts`) without the extra unsafe pointer code.
        hasher.write(&integer.to_ne_bytes());
    }
}
// ExternalSymbol ________________________________
/// An `external_source_symbol` attribute.
///
/// Collects the `language` and `definedIn` strings and the presence of the
/// `generated_declaration` flag parsed from the attribute.
#[cfg(feature="gte_clang_5_0")]
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct ExternalSymbol {
    /// The `language` string from this attribute.
    pub language: String,
    /// The `definedIn` string from this attribute.
    pub defined: String,
    /// Whether `generated_declaration` is set for this attribute.
    pub generated: bool,
}
// Index _________________________________________
/// A collection of translation units.
pub struct Index<'c> {
    // Owned `CXIndex` handle; released in `Drop`.
    ptr: CXIndex,
    // Ties this index's lifetime to the `Clang` instance it was created from.
    _marker: PhantomData<&'c Clang>,
}
impl<'c> Index<'c> {
    //- Constructors -----------------------------

    /// Wraps a raw `CXIndex`, panicking if the pointer is null.
    fn from_ptr(ptr: CXIndex) -> Index<'c> {
        assert!(!ptr.is_null());
        Index { ptr, _marker: PhantomData }
    }

    /// Constructs a new `Index`.
    ///
    /// `exclude` determines whether declarations from precompiled headers are excluded and
    /// `diagnostics` determines whether diagnostics are printed while parsing source files.
    pub fn new(_: &'c Clang, exclude: bool, diagnostics: bool) -> Index<'c> {
        let ptr = unsafe { clang_createIndex(exclude as c_int, diagnostics as c_int) };
        Index::from_ptr(ptr)
    }

    //- Accessors --------------------------------

    /// Returns a parser for the supplied file.
    pub fn parser<F: Into<PathBuf>>(&'c self, f: F) -> Parser<'c> {
        Parser::new(self, f)
    }

    /// Sets the invocation emission path for this index.
    #[cfg(feature="gte_clang_6_0")]
    pub fn set_invocation_emission_path<P: AsRef<Path>>(&'c self, path: P) {
        let cstring = utility::from_path(path);
        unsafe { clang_CXIndex_setInvocationEmissionPathOption(self.ptr, cstring.as_ptr()); }
    }

    /// Returns the thread options for this index.
    pub fn get_thread_options(&self) -> ThreadOptions {
        let options = unsafe { clang_CXIndex_getGlobalOptions(self.ptr) };
        ThreadOptions::from(options)
    }

    //- Mutators ---------------------------------

    /// Sets the thread options for this index.
    pub fn set_thread_options(&mut self, options: ThreadOptions) {
        unsafe { clang_CXIndex_setGlobalOptions(self.ptr, options.into()); }
    }
}
impl<'c> Drop for Index<'c> {
    fn drop(&mut self) {
        // Release the underlying `CXIndex`.
        unsafe {
            clang_disposeIndex(self.ptr);
        }
    }
}
impl<'c> fmt::Debug for Index<'c> {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        let mut debug = formatter.debug_struct("Index");
        debug.field("thread_options", &self.get_thread_options());
        debug.finish()
    }
}
// ObjCAttributes ________________________________
// Bit-flag wrapper generated by `options!` over `CXObjCPropertyAttrKind`.
options! {
    /// Indicates which attributes were applied to an Objective-C property.
    options ObjCAttributes: CXObjCPropertyAttrKind {
        /// Indicates use of the `readonly` attribute.
        pub readonly: CXObjCPropertyAttr_readonly,
        /// Indicates use of the `getter` attribute.
        pub getter: CXObjCPropertyAttr_getter,
        /// Indicates use of the `assign` attribute.
        pub assign: CXObjCPropertyAttr_assign,
        /// Indicates use of the `readwrite` attribute.
        pub readwrite: CXObjCPropertyAttr_readwrite,
        /// Indicates use of the `retain` attribute.
        pub retain: CXObjCPropertyAttr_retain,
        /// Indicates use of the `copy` attribute.
        pub copy: CXObjCPropertyAttr_copy,
        /// Indicates use of the `nonatomic` attribute.
        pub nonatomic: CXObjCPropertyAttr_nonatomic,
        /// Indicates use of the `setter` attribute.
        pub setter: CXObjCPropertyAttr_setter,
        /// Indicates use of the `atomic` attribute.
        pub atomic: CXObjCPropertyAttr_atomic,
        /// Indicates use of the `weak` attribute.
        pub weak: CXObjCPropertyAttr_weak,
        /// Indicates use of the `strong` attribute.
        pub strong: CXObjCPropertyAttr_strong,
        /// Indicates use of the `unsafe_unretained` attribute.
        pub unsafe_retained: CXObjCPropertyAttr_unsafe_unretained,
    }, objcattributes: #[feature="gte_clang_3_9"] {
        /// Indicates use of the `class` attribute.
        pub class: CXObjCPropertyAttr_class,
    }
}
// ObjCQualifiers ________________________________
// Bit-flag wrapper generated by `options!` over `CXObjCDeclQualifierKind`.
options! {
    /// Indicates which qualifiers were applied to an Objective-C method return or parameter type.
    options ObjCQualifiers: CXObjCDeclQualifierKind {
        /// Indicates use of the `in` qualifier.
        pub in_: CXObjCDeclQualifier_In,
        /// Indicates use of the `inout` qualifier.
        pub inout: CXObjCDeclQualifier_Inout,
        /// Indicates use of the `out` qualifier.
        pub out: CXObjCDeclQualifier_Out,
        /// Indicates use of the `bycopy` qualifier.
        pub bycopy: CXObjCDeclQualifier_Bycopy,
        /// Indicates use of the `byref` qualifier.
        pub byref: CXObjCDeclQualifier_Byref,
        /// Indicates use of the `oneway` qualifier.
        pub oneway: CXObjCDeclQualifier_Oneway,
    }
}
// Parser ________________________________________
// Builder generated by `builder!`: each `OPTIONS` entry becomes a setter that
// toggles the corresponding `CXTranslationUnit_Flags` bit on `Parser::flags`.
builder! {
    /// Parses translation units.
    builder Parser: CXTranslationUnit_Flags {
        index: &'tu Index<'tu>,
        file: PathBuf,
        arguments: Vec<CString>,
        unsaved: Vec<Unsaved>;
    OPTIONS:
        /// Sets whether certain code completion results will be cached when the translation unit is
        /// reparsed.
        ///
        /// This option increases the time it takes to reparse the translation unit but improves
        /// code completion performance.
        pub cache_completion_results: CXTranslationUnit_CacheCompletionResults,
        /// Sets whether a detailed preprocessing record will be constructed which tracks all macro
        /// definitions and instantiations.
        pub detailed_preprocessing_record: CXTranslationUnit_DetailedPreprocessingRecord,
        /// Sets whether documentation comment briefs will be included in code completion results.
        pub briefs_in_completion_results: CXTranslationUnit_IncludeBriefCommentsInCodeCompletion,
        /// Sets whether the translation unit will be considered incomplete.
        ///
        /// This option suppresses certain semantic analyses and is typically used when parsing
        /// headers with the intent of creating a precompiled header.
        pub incomplete: CXTranslationUnit_Incomplete,
        /// Sets whether function and method bodies will be skipped.
        pub skip_function_bodies: CXTranslationUnit_SkipFunctionBodies,
        /// Sets whether processing will continue after a fatal error is encountered.
        #[cfg(feature="gte_clang_3_9")]
        pub keep_going: CXTranslationUnit_KeepGoing,
        /// Sets whether incremental processing will be used.
        #[cfg(feature="gte_clang_5_0")]
        pub single_file_parse: CXTranslationUnit_SingleFileParse,
        /// Sets whether function bodies will only be skipped in the preamble.
        ///
        /// Used in conjunction with `skip_function_bodies`.
        #[cfg(feature="gte_clang_7_0")]
        pub limit_skip_function_bodies_to_preamble: CXTranslationUnit_LimitSkipFunctionBodiesToPreamble,
        /// Sets whether attributed types should be included.
        #[cfg(feature="gte_clang_8_0")]
        pub include_attributed_types: CXTranslationUnit_IncludeAttributedTypes,
        /// Sets whether implicit attributes should be visited.
        #[cfg(feature="gte_clang_8_0")]
        pub visit_implicit_attributes: CXTranslationUnit_VisitImplicitAttributes,
    }
}
impl<'tu> Parser<'tu> {
    //- Constructors -----------------------------
    // Creates a parser with no compiler arguments, no unsaved files,
    // and no translation unit flags set.
    fn new<F: Into<PathBuf>>(index: &'tu Index<'tu>, file: F) -> Parser<'tu> {
        let flags: CXTranslationUnit_Flags = 0;
        Parser { index, file: file.into(), arguments: vec![], unsaved: vec![], flags }
    }
    //- Mutators ---------------------------------
    /// Sets the compiler arguments to provide to `libclang`.
    ///
    /// Any compiler argument that could be supplied to `clang` may be supplied to this
    /// function. However, the following arguments are ignored:
    ///
    /// * `-c`
    /// * `-emit-ast`
    /// * `-fsyntax-only`
    /// * `-o` and the following `<output>`
    pub fn arguments<S: AsRef<str>>(&mut self, arguments: &[S]) -> &mut Parser<'tu> {
        self.arguments = arguments.iter().map(utility::from_string).collect();
        self
    }
    /// Sets the unsaved files to use.
    pub fn unsaved(&mut self, unsaved: &[Unsaved]) -> &mut Parser<'tu> {
        self.unsaved = unsaved.into();
        self
    }
    //- Accessors --------------------------------
    /// Parses a translation unit.
    ///
    /// # Failures
    ///
    /// * an error occurs while deserializing an AST file
    /// * `libclang` crashes
    /// * an unknown error occurs
    pub fn parse(&self) -> Result<TranslationUnit<'tu>, SourceError> {
        // The raw pointers below borrow from `self.arguments`/`self.unsaved`,
        // which stay alive for the whole call.
        let arguments = self.arguments.iter().map(|a| a.as_ptr()).collect::<Vec<_>>();
        let unsaved = self.unsaved.iter().map(|u| u.as_raw()).collect::<Vec<_>>();
        unsafe {
            // `ptr` is an out-parameter filled in by libclang on success.
            let mut ptr = ptr::null_mut();
            let code = clang_parseTranslationUnit2(
                self.index.ptr,
                utility::from_path(&self.file).as_ptr(),
                arguments.as_ptr(),
                arguments.len() as c_int,
                unsaved.as_ptr() as *mut CXUnsavedFile,
                unsaved.len() as c_uint,
                self.flags,
                &mut ptr,
            );
            // A zero error code means `ptr` now holds a valid translation unit.
            SourceError::from_error(code).map(|_| TranslationUnit::from_ptr(ptr))
        }
    }
}
// PlatformAvailability __________________________
/// The availability of an AST entity on a particular platform.
///
/// Owned counterpart of libclang's `CXPlatformAvailability`.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct PlatformAvailability {
    /// The name of the platform.
    pub platform: String,
    /// Whether the AST entity is unavailable on the platform.
    pub unavailable: bool,
    /// The version of the platform in which this AST entity was introduced, if any.
    pub introduced: Option<Version>,
    /// The version of the platform in which this AST entity was deprecated, if any.
    pub deprecated: Option<Version>,
    /// The version of the platform in which this AST entity was obsoleted, if any.
    pub obsoleted: Option<Version>,
    /// A message to display to users (e.g., replacement API suggestions).
    pub message: Option<String>,
}
impl PlatformAvailability {
    //- Constructors -----------------------------

    /// Copies the fields out of a raw `CXPlatformAvailability` and then disposes of it.
    fn from_raw(mut raw: CXPlatformAvailability) -> PlatformAvailability {
        // Every field must be converted to an owned value before the raw
        // structure is disposed of below.
        let platform = utility::to_string(raw.Platform);
        let unavailable = raw.Unavailable != 0;
        let introduced = raw.Introduced.map(Version::from_raw);
        let deprecated = raw.Deprecated.map(Version::from_raw);
        let obsoleted = raw.Obsoleted.map(Version::from_raw);
        let message = utility::to_string_option(raw.Message);
        unsafe { clang_disposeCXPlatformAvailability(&mut raw); }
        PlatformAvailability { platform, unavailable, introduced, deprecated, obsoleted, message }
    }
}
// PrettyPrinter _________________________________
/// Pretty prints declarations.
#[cfg(feature="gte_clang_7_0")]
#[derive(Debug)]
pub struct PrettyPrinter<'e> {
    // Owned `CXPrintingPolicy` handle; released in `Drop`.
    ptr: CXPrintingPolicy,
    // The entity whose declaration is printed by `print`.
    entity: &'e Entity<'e>,
}
#[cfg(feature="gte_clang_7_0")]
impl<'e> PrettyPrinter<'e> {
    //- Constructors -----------------------------

    /// Wraps a raw printing policy, panicking if the pointer is null.
    fn from_raw(ptr: CXPrintingPolicy, entity: &'e Entity<'e>) -> Self {
        assert!(!ptr.is_null());
        PrettyPrinter { ptr, entity }
    }

    //- Accessors --------------------------------

    /// Gets the specified flag value.
    pub fn get_flag(&self, flag: PrintingPolicyFlag) -> bool {
        let value = unsafe { clang_PrintingPolicy_getProperty(self.ptr, mem::transmute(flag)) };
        value != 0
    }

    /// Sets the specified flag value.
    pub fn set_flag(&self, flag: PrintingPolicyFlag, value: bool) -> &Self {
        let raw = if value { 1 } else { 0 };
        unsafe { clang_PrintingPolicy_setProperty(self.ptr, mem::transmute(flag), raw); }
        self
    }

    /// Gets the number of spaces used to indent each line.
    pub fn get_indentation_amount(&self) -> u8 {
        let amount = unsafe { clang_PrintingPolicy_getProperty(self.ptr, CXPrintingPolicy_Indentation) };
        amount as u8
    }

    /// Sets the number of spaces used to indent each line.
    pub fn set_indentation_amount(&self, value: u8) -> &Self {
        unsafe {
            clang_PrintingPolicy_setProperty(self.ptr, CXPrintingPolicy_Indentation, value.into());
        }
        self
    }

    /// Pretty prints the declaration of the wrapped entity.
    pub fn print(&self) -> String {
        unsafe {
            let string = clang_getCursorPrettyPrinted(self.entity.raw, self.ptr);
            utility::to_string(string)
        }
    }
}
#[cfg(feature="gte_clang_7_0")]
impl<'e> Drop for PrettyPrinter<'e> {
    fn drop(&mut self) {
        // Release the underlying printing policy.
        unsafe {
            clang_PrintingPolicy_dispose(self.ptr)
        }
    }
}
// Target ________________________________________
/// Information about the target for a translation unit.
///
/// Obtained via [`TranslationUnit::get_target`].
#[cfg(feature="gte_clang_5_0")]
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Target {
    /// The normalized target triple for the target.
    pub triple: String,
    /// The width of a pointer in the target in bits.
    pub pointer_width: usize,
}
#[cfg(feature="gte_clang_5_0")]
impl Target {
    //- Constructors -----------------------------

    /// Copies the target information out of `raw` and then disposes of it.
    fn from_raw(raw: CXTargetInfo) -> Target {
        unsafe {
            // Extract both fields before the raw target info is disposed of.
            let triple = utility::to_string(clang_TargetInfo_getTriple(raw));
            let pointer_width = clang_TargetInfo_getPointerWidth(raw) as usize;
            clang_TargetInfo_dispose(raw);
            Target { triple, pointer_width }
        }
    }
}
// ThreadOptions _________________________________
// Bit-flag wrapper generated by `options!` over `CXGlobalOptFlags`.
options! {
    /// A set of options that determines which types of threads should use background priority.
    #[derive(Default)]
    options ThreadOptions: CXGlobalOptFlags {
        /// Indicates whether threads created for editing purposes should use background priority.
        pub editing: CXGlobalOpt_ThreadBackgroundPriorityForEditing,
        /// Indicates whether threads created for indexing purposes should use background priority.
        pub indexing: CXGlobalOpt_ThreadBackgroundPriorityForIndexing,
    }
}
// TranslationUnit _______________________________
/// A preprocessed and parsed source file.
pub struct TranslationUnit<'i> {
    // Owned `CXTranslationUnit` handle; released in `Drop`.
    ptr: CXTranslationUnit,
    // Ties this translation unit's lifetime to the `Index` that produced it.
    _marker: PhantomData<&'i Index<'i>>,
}
impl<'i> TranslationUnit<'i> {
    //- Constructors -----------------------------
    // Wraps a raw translation unit pointer, panicking if it is null.
    fn from_ptr(ptr: CXTranslationUnit) -> TranslationUnit<'i> {
        assert!(!ptr.is_null());
        TranslationUnit { ptr, _marker: PhantomData }
    }
    /// Constructs a new `TranslationUnit` from an AST file.
    ///
    /// # Failures
    ///
    /// * an unknown error occurs
    pub fn from_ast<F: AsRef<Path>>(
        index: &'i Index, file: F
    ) -> Result<TranslationUnit<'i>, ()> {
        let path = utility::from_path(file);
        // A null pointer indicates the AST file could not be deserialized.
        let ptr = unsafe { clang_createTranslationUnit(index.ptr, path.as_ptr()) };
        ptr.map(TranslationUnit::from_ptr).ok_or(())
    }
    //- Accessors --------------------------------
    /// Returns the diagnostics for this translation unit.
    pub fn get_diagnostics(&'i self) -> Vec<Diagnostic<'i>> {
        iter!(clang_getNumDiagnostics(self.ptr), clang_getDiagnostic(self.ptr),).map(|d| {
            Diagnostic::from_ptr(d, self)
        }).collect()
    }
    /// Returns the entity for this translation unit.
    pub fn get_entity(&'i self) -> Entity<'i> {
        unsafe { Entity::from_raw(clang_getTranslationUnitCursor(self.ptr), self) }
    }
    /// Returns the file at the supplied path in this translation unit, if any.
    pub fn get_file<F: AsRef<Path>>(&'i self, file: F) -> Option<File<'i>> {
        let file = unsafe { clang_getFile(self.ptr, utility::from_path(file).as_ptr()) };
        file.map(|f| File::from_ptr(f, self))
    }
    /// Returns the memory usage of this translation unit.
    pub fn get_memory_usage(&self) -> HashMap<MemoryUsage, usize> {
        unsafe {
            let raw = clang_getCXTUResourceUsage(self.ptr);
            let raws = slice::from_raw_parts(raw.entries, raw.numEntries as usize);
            // Copy the entries into an owned map before disposing of the raw
            // resource usage buffer.
            let usage = raws.iter().map(|u| (mem::transmute(u.kind), u.amount as usize)).collect();
            clang_disposeCXTUResourceUsage(raw);
            usage
        }
    }
    /// Returns the source ranges in this translation unit that were skipped by the preprocessor.
    ///
    /// This will always return an empty `Vec` if the translation unit was not constructed with a
    /// detailed preprocessing record.
    #[cfg(feature="gte_clang_4_0")]
    pub fn get_skipped_ranges(&'i self) -> Vec<SourceRange<'i>> {
        unsafe {
            let raw = clang_getAllSkippedRanges(self.ptr);
            let raws = slice::from_raw_parts((*raw).ranges, (*raw).count as usize);
            // Copy the ranges out before disposing of the list they live in.
            let ranges = raws.iter().map(|r| SourceRange::from_raw(*r, self)).collect();
            clang_disposeSourceRangeList(raw);
            ranges
        }
    }
    /// Returns information about the target for this translation unit.
    #[cfg(feature="gte_clang_5_0")]
    pub fn get_target(&self) -> Target {
        unsafe { Target::from_raw(clang_getTranslationUnitTargetInfo(self.ptr)) }
    }
    /// Returns the AST entities which correspond to the supplied tokens, if any.
    pub fn annotate(&'i self, tokens: &[Token<'i>]) -> Vec<Option<Entity<'i>>> {
        unsafe {
            // libclang writes one (possibly null) cursor per token into `raws`.
            let mut raws = vec![CXCursor::default(); tokens.len()];
            // NOTE(review): the shared slice is cast to a mutable pointer here;
            // this assumes `clang_annotateTokens` does not mutate the tokens — confirm.
            let ptr = tokens.as_ptr() as *mut CXToken;
            clang_annotateTokens(self.ptr, ptr, tokens.len() as c_uint, raws.as_mut_ptr());
            raws.iter().map(|e| e.map(|e| Entity::from_raw(e, self))).collect()
        }
    }
    /// Returns a completer which runs code completion.
    pub fn completer<F: Into<PathBuf>>(&self, file: F, line: u32, column: u32) -> Completer {
        Completer::new(self, file, line, column)
    }
    /// Saves this translation unit to an AST file.
    ///
    /// # Failures
    ///
    /// * errors in the translation unit prevent saving
    /// * an unknown error occurs
    pub fn save<F: AsRef<Path>>(&self, file: F) -> Result<(), SaveError> {
        let file = utility::from_path(file);
        let flags = CXSaveTranslationUnit_None;
        let code = unsafe { clang_saveTranslationUnit(self.ptr, file.as_ptr(), flags) };
        SaveError::from_error(code)
    }
    //- Consumers --------------------------------
    /// Consumes this translation unit and reparses the source file it was created from with the
    /// same compiler arguments that were used originally.
    ///
    /// # Failures
    ///
    /// * an error occurs while deserializing an AST file
    /// * `libclang` crashes
    /// * an unknown error occurs
    pub fn reparse(self, unsaved: &[Unsaved]) -> Result<TranslationUnit<'i>, SourceError> {
        let unsaved = unsaved.iter().map(|u| u.as_raw()).collect::<Vec<_>>();
        unsafe {
            let code = clang_reparseTranslationUnit(
                self.ptr,
                unsaved.len() as c_uint,
                unsaved.as_ptr() as *mut CXUnsavedFile,
                CXReparse_None,
            );
            // On success the same handle remains valid, so `self` is returned.
            SourceError::from_error(code).map(|_| self)
        }
    }
}
impl<'i> Drop for TranslationUnit<'i> {
    fn drop(&mut self) {
        // Release the underlying `CXTranslationUnit`.
        unsafe {
            clang_disposeTranslationUnit(self.ptr);
        }
    }
}
impl<'i> fmt::Debug for TranslationUnit<'i> {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        let spelling = unsafe { clang_getTranslationUnitSpelling(self.ptr) };
        let mut debug = formatter.debug_struct("TranslationUnit");
        debug.field("spelling", &utility::to_string(spelling));
        debug.finish()
    }
}
// Type __________________________________________
/// The type of an AST entity.
#[derive(Copy, Clone)]
pub struct Type<'tu> {
    // The raw `CXType` this wrapper represents.
    raw: CXType,
    // The translation unit this type belongs to.
    tu: &'tu TranslationUnit<'tu>,
}
impl<'tu> Type<'tu> {
    //- Constructors -----------------------------
    // Wraps a raw `CXType` borrowed from the given translation unit.
    fn from_raw(raw: CXType, tu: &'tu TranslationUnit<'tu>) -> Type<'tu> {
        Type { raw, tu }
    }
//- Accessors --------------------------------
/// Returns the kind of this type.
pub fn get_kind(&self) -> TypeKind {
unsafe { mem::transmute(self.raw.kind) }
}
/// Returns the display name of this type.
pub fn get_display_name(&self) -> String {
unsafe { utility::to_string(clang_getTypeSpelling(self.raw)) }
}
/// Returns the alignment of this type in bytes.
///
/// # Failures
///
/// * this type is a dependent type
/// * this type is an incomplete type
pub fn get_alignof(&self) -> Result<usize, AlignofError> {
let alignof_ = unsafe { clang_Type_getAlignOf(self.raw) };
AlignofError::from_error(alignof_).map(|_| alignof_ as usize)
}
/// Returns the offset of the field with the supplied name in this record type in bits.
///
/// # Failures
///
/// * this record type is a dependent type
/// * this record record type is an incomplete type
/// * this record type does not contain a field with the supplied name
pub fn get_offsetof<F: AsRef<str>>(&self, field: F) -> Result<usize, OffsetofError> {
let field = utility::from_string(field);
let offsetof_ = unsafe { clang_Type_getOffsetOf(self.raw, field.as_ptr()) };
OffsetofError::from_error(offsetof_).map(|_| offsetof_ as usize)
}
/// Returns the size of this type in bytes.
///
/// # Failures
///
/// * this type is a dependent type
/// * this type is an incomplete type
/// * this type is a variable size type
pub fn get_sizeof(&self) -> Result<usize, SizeofError> {
let sizeof_ = unsafe { clang_Type_getSizeOf(self.raw) };
SizeofError::from_error(sizeof_).map(|_| sizeof_ as usize)
}
/// Returns the address space of this type.
#[cfg(feature="gte_clang_5_0")]
pub fn get_address_space(&self) -> usize {
unsafe { clang_getAddressSpace(self.raw) as usize }
}
    /// Returns the argument types for this function or method type, if applicable.
    pub fn get_argument_types(&self) -> Option<Vec<Type<'tu>>> {
        // `iter_option!` yields `None` when this is not a function or method
        // type (presumably signaled by a negative count — see the macro).
        iter_option!(
            clang_getNumArgTypes(self.raw),
            clang_getArgType(self.raw),
        ).map(|i| i.map(|t| Type::from_raw(t, self.tu)).collect())
    }
/// Returns the calling convention specified for this function type, if applicable.
pub fn get_calling_convention(&self) -> Option<CallingConvention> {
unsafe {
match clang_getFunctionTypeCallingConv(self.raw) {
CXCallingConv_Invalid => None,
other => Some(mem::transmute(other)),
}
}
}
/// Returns the canonical type for this type.
///
/// The canonical type is the underlying type with all "sugar" removed (e.g., typedefs).
pub fn get_canonical_type(&self) -> Type<'tu> {
unsafe { Type::from_raw(clang_getCanonicalType(self.raw), self.tu) }
}
/// Returns the class type for this member pointer type, if applicable.
pub fn get_class_type(&self) -> Option<Type<'tu>> {
unsafe { clang_Type_getClassType(self.raw).map(|t| Type::from_raw(t, self.tu)) }
}
/// Returns the AST entity that declared this type, if any.
pub fn get_declaration(&self) -> Option<Entity<'tu>> {
unsafe { clang_getTypeDeclaration(self.raw).map(|e| Entity::from_raw(e, self.tu)) }
}
/// Returns the type named by this elaborated type, if applicable.
#[cfg(feature="gte_clang_3_9")]
pub fn get_elaborated_type(&self) -> Option<Type<'tu>> {
unsafe { clang_Type_getNamedType(self.raw).map(|t| Type::from_raw(t, self.tu)) }
}
/// Returns the element type for this array, complex, or vector type, if applicable.
pub fn get_element_type(&self) -> Option<Type<'tu>> {
unsafe { clang_getElementType(self.raw).map(|t| Type::from_raw(t, self.tu)) }
}
/// Returns the exception specification of this type, if applicable.
#[cfg(feature="gte_clang_5_0")]
pub fn get_exception_specification(&self) -> Option<ExceptionSpecification> {
unsafe {
match clang_getExceptionSpecificationType(self.raw) {
-1 | CXCursor_ExceptionSpecificationKind_None => None,
other => Some(mem::transmute(other)),
}
}
}
/// Returns the fields in this record type, if applicable.
#[cfg(feature="gte_clang_3_7")]
pub fn get_fields(&self) -> Option<Vec<Entity<'tu>>> {
if self.get_kind() == TypeKind::Record {
let mut fields = vec![];
self.visit_fields(|e| {
fields.push(e);
true
});
Some(fields)
} else {
None
}
}
/// Return the type that was modified by this attributed type.
#[cfg(feature="gte_clang_8_0")]
pub fn get_modified_type(&self) -> Option<Type<'tu>> {
    unsafe {
        let modified = clang_Type_getModifiedType(self.raw);
        modified.map(|t| Type::from_raw(t, self.tu))
    }
}
/// Returns the nullability of this pointer type, if applicable.
#[cfg(feature="gte_clang_8_0")]
pub fn get_nullability(&self) -> Option<Nullability> {
    let raw = unsafe { clang_Type_getNullability(self.raw) };
    match raw {
        // 0-2 (`NonNull`, `Nullable`, `Unspecified`) are the only
        // `CXTypeNullability` values with corresponding `Nullability`
        // variants; transmuting anything else (including
        // `CXTypeNullability_Invalid` or values added by a newer libclang)
        // would be undefined behavior, so those are mapped to `None`.
        0..=2 => Some(unsafe { mem::transmute(raw) }),
        _ => None,
    }
}
/// Returns the encoding of this Objective-C type, if applicable.
#[cfg(feature="gte_clang_3_9")]
pub fn get_objc_encoding(&self) -> Option<String> {
    unsafe {
        let encoding = clang_Type_getObjCEncoding(self.raw);
        utility::to_string_option(encoding)
    }
}
/// Returns the base type of this Objective-C type, if applicable.
#[cfg(feature="gte_clang_8_0")]
pub fn get_objc_object_base_type(&self) -> Option<Type> {
    unsafe {
        let base = clang_Type_getObjCObjectBaseType(self.raw);
        base.map(|t| Type::from_raw(t, self.tu))
    }
}
/// Returns the declarations for all protocol references for this Objective-C type, if applicable.
#[cfg(feature="gte_clang_8_0")]
pub fn get_objc_protocol_declarations(&self) -> Vec<Entity<'tu>> {
// `iter!` pairs a libclang count function with its indexed accessor to yield
// the raw protocol cursors, which are then wrapped as `Entity` values.
iter!(
clang_Type_getNumObjCProtocolRefs(self.raw),
clang_Type_getObjCProtocolDecl(self.raw),
).map(|c| Entity::from_raw(c, self.tu)).collect()
}
/// Returns the type arguments for this Objective-C type, if applicable.
#[cfg(feature="gte_clang_8_0")]
pub fn get_objc_type_arguments(&self) -> Vec<Type<'tu>> {
// `iter!` pairs a libclang count function with its indexed accessor to yield
// the raw type arguments, which are then wrapped as `Type` values.
iter!(
clang_Type_getNumObjCTypeArgs(self.raw),
clang_Type_getObjCTypeArg(self.raw),
).map(|t| Type::from_raw(t, self.tu)).collect()
}
/// Returns the pointee type for this pointer type, if applicable.
pub fn get_pointee_type(&self) -> Option<Type<'tu>> {
    unsafe {
        let pointee = clang_getPointeeType(self.raw);
        pointee.map(|t| Type::from_raw(t, self.tu))
    }
}
/// Returns the ref qualifier for this C++ function or method type, if applicable.
pub fn get_ref_qualifier(&self) -> Option<RefQualifier> {
    let raw = unsafe { clang_Type_getCXXRefQualifier(self.raw) };
    match raw {
        // `CXRefQualifier_LValue` (1) and `CXRefQualifier_RValue` (2) are the
        // only values with corresponding `RefQualifier` variants;
        // `CXRefQualifier_None` (0) and any unrecognized value must not be
        // transmuted into the enum, as that would be undefined behavior.
        1 | 2 => Some(unsafe { mem::transmute(raw) }),
        _ => None,
    }
}
/// Returns the result type for this function or method type, if applicable.
pub fn get_result_type(&self) -> Option<Type<'tu>> {
    unsafe {
        let result = clang_getResultType(self.raw);
        result.map(|t| Type::from_raw(t, self.tu))
    }
}
/// Returns the size of this constant array or vector type, if applicable.
pub fn get_size(&self) -> Option<usize> {
    // A negative count indicates this type has no element count (e.g., it is
    // not a constant array or vector type).
    let count = unsafe { clang_getNumElements(self.raw) };
    if count < 0 {
        None
    } else {
        Some(count as usize)
    }
}
/// Returns the template argument types for this template class specialization type, if
/// applicable.
pub fn get_template_argument_types(&self) -> Option<Vec<Option<Type<'tu>>>> {
// `iter_option!` yields `None` when the count function reports this type has
// no template arguments; individual arguments whose accessor returns an
// invalid type (presumably non-type arguments) appear as inner `None`s.
iter_option!(
clang_Type_getNumTemplateArguments(self.raw),
clang_Type_getTemplateArgumentAsType(self.raw),
).map(|i| i.map(|t| t.map(|t| Type::from_raw(t, self.tu))).collect())
}
/// Returns the typedef name of this type, if applicable.
#[cfg(feature="gte_clang_5_0")]
pub fn get_typedef_name(&self) -> Option<String> {
    unsafe {
        let name = clang_getTypedefName(self.raw);
        utility::to_string_option(name)
    }
}
/// Returns whether this type is qualified with const.
pub fn is_const_qualified(&self) -> bool {
    let qualified = unsafe { clang_isConstQualifiedType(self.raw) };
    qualified != 0
}
/// Returns whether this type is an elaborated type, if it can be determined for certain.
pub fn is_elaborated(&self) -> Option<bool> {
    // 119 is the raw value of `CXType_Elaborated`, which is only reported by
    // libclang 3.9 and later; with an older libclang, a non-elaborated kind
    // cannot rule out elaboration, so the result is indeterminate.
    match self.raw.kind {
        119 => Some(true),
        _ if cfg!(feature="gte_clang_3_9") => Some(false),
        _ => None,
    }
}
/// Returns whether this type is plain old data (POD).
pub fn is_pod(&self) -> bool {
    let pod = unsafe { clang_isPODType(self.raw) };
    pod != 0
}
/// Returns whether this type is qualified with restrict.
pub fn is_restrict_qualified(&self) -> bool {
    let qualified = unsafe { clang_isRestrictQualifiedType(self.raw) };
    qualified != 0
}
/// Returns whether this type is a transparent tag typedef.
#[cfg(feature="gte_clang_5_0")]
pub fn is_transparent_tag(&self) -> bool {
    let transparent = unsafe { clang_Type_isTransparentTagTypedef(self.raw) };
    transparent != 0
}
/// Returns whether this type is a variadic function type.
pub fn is_variadic(&self) -> bool {
    let variadic = unsafe { clang_isFunctionTypeVariadic(self.raw) };
    variadic != 0
}
/// Returns whether this type is qualified with volatile.
pub fn is_volatile_qualified(&self) -> bool {
    let qualified = unsafe { clang_isVolatileQualifiedType(self.raw) };
    qualified != 0
}
/// Visits the fields in this record type, returning `None` if this type is not a record type
/// and returning `Some(b)` otherwise where `b` indicates whether visitation was ended by the
/// callback returning `false`.
#[cfg(feature="gte_clang_3_7")]
pub fn visit_fields<F: FnMut(Entity<'tu>) -> bool>(&self, f: F) -> Option<bool> {
if self.get_kind() != TypeKind::Record {
return None;
}
// Object-safe stand-in for the `FnMut` closure so it can be passed through
// the C callback below as a type-erased trait object.
trait Callback<'tu> {
fn call(&mut self, field: Entity<'tu>) -> bool;
}
impl<'tu, F: FnMut(Entity<'tu>) -> bool> Callback<'tu> for F {
fn call(&mut self, field: Entity<'tu>) -> bool {
self(field)
}
}
// C-ABI trampoline invoked by libclang once per field; `data` points at the
// `(translation unit, boxed callback)` tuple constructed below.
extern fn visit(cursor: CXCursor, data: CXClientData) -> CXVisitorResult {
unsafe {
let &mut (tu, ref mut callback) =
&mut *(data as *mut (&TranslationUnit, Box<dyn Callback>));
// The callback's boolean return controls whether visitation continues.
if callback.call(Entity::from_raw(cursor, tu)) {
CXVisit_Continue
} else {
CXVisit_Break
}
}
}
let mut data = (self.tu, Box::new(f) as Box<dyn Callback>);
unsafe {
let data = utility::addressof(&mut data);
// `CXVisit_Break` means the callback asked to stop early.
Some(clang_Type_visitFields(self.raw, visit, data) == CXVisit_Break)
}
}
//- Categorization ---------------------------

/// Returns whether this type is an integer type.
pub fn is_integer(&self) -> bool {
    // Integer kinds occupy the contiguous range `CXType_Bool..=CXType_Int128`.
    let kind = self.raw.kind;
    CXType_Bool <= kind && kind <= CXType_Int128
}
/// Returns whether this type is a signed integer type.
pub fn is_signed_integer(&self) -> bool {
    // Signed kinds occupy the contiguous range `CXType_Char_S..=CXType_Int128`.
    let kind = self.raw.kind;
    CXType_Char_S <= kind && kind <= CXType_Int128
}
/// Returns whether this type is an unsigned integer type.
pub fn is_unsigned_integer(&self) -> bool {
    // Unsigned kinds occupy the contiguous range `CXType_Bool..=CXType_UInt128`.
    let kind = self.raw.kind;
    CXType_Bool <= kind && kind <= CXType_UInt128
}
}
impl<'tu> fmt::Debug for Type<'tu> {
    /// Formats this type's kind and display name for debugging.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        let mut builder = formatter.debug_struct("Type");
        builder.field("kind", &self.get_kind());
        builder.field("display_name", &self.get_display_name());
        builder.finish()
    }
}
impl<'tu> cmp::PartialEq for Type<'tu> {
    /// Compares two types for equality using libclang's type comparison.
    fn eq(&self, other: &Type<'tu>) -> bool {
        let equal = unsafe { clang_equalTypes(self.raw, other.raw) };
        equal != 0
    }
}
// Equality via `clang_equalTypes` is treated as a full equivalence relation.
impl<'tu> cmp::Eq for Type<'tu> { }
// Unsaved _______________________________________

/// The path to and unsaved contents of a previously existing file.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Unsaved {
// Both fields are stored as C strings so they can be handed to libclang
// (see `as_raw`, which builds a `CXUnsavedFile` from them).
path: CString,
contents: CString,
}
impl Unsaved {
    //- Constructors -----------------------------

    /// Constructs a new `Unsaved`.
    pub fn new<P: AsRef<Path>, C: AsRef<str>>(path: P, contents: C) -> Unsaved {
        let path = utility::from_path(path);
        let contents = utility::from_string(contents);
        Unsaved { path, contents }
    }

    //- Accessors --------------------------------

    /// Builds the `CXUnsavedFile` handed to libclang; the pointers borrow from
    /// this value, so it must outlive the returned struct's use.
    fn as_raw(&self) -> CXUnsavedFile {
        let length = self.contents.as_bytes().len() as c_ulong;
        CXUnsavedFile {
            Filename: self.path.as_ptr(),
            Contents: self.contents.as_ptr(),
            Length: length,
        }
    }
}
// Usr ___________________________________________

/// A Unified Symbol Resolution (USR).
///
/// A USR identifies an AST entity and can be used to compare AST entities from different
/// translation units.
///
/// The wrapped `String` is the raw USR text produced by libclang.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Usr(pub String);
impl Usr {
    //- Constructors -----------------------------

    /// Constructs a new `Usr` from an Objective-C category.
    pub fn from_objc_category<C: AsRef<str>>(class: C, category: C) -> Usr {
        let class = utility::from_string(class);
        let category = utility::from_string(category);
        unsafe {
            let raw = clang_constructUSR_ObjCCategory(class.as_ptr(), category.as_ptr());
            Usr(utility::to_string(raw))
        }
    }

    /// Constructs a new `Usr` from an Objective-C class.
    pub fn from_objc_class<C: AsRef<str>>(class: C) -> Usr {
        let class = utility::from_string(class);
        let raw = unsafe { clang_constructUSR_ObjCClass(class.as_ptr()) };
        Usr(utility::to_string(raw))
    }

    /// Constructs a new `Usr` from an Objective-C instance variable.
    pub fn from_objc_ivar<N: AsRef<str>>(class: &Usr, name: N) -> Usr {
        utility::with_string(&class.0, |s| {
            let name = utility::from_string(name);
            let raw = unsafe { clang_constructUSR_ObjCIvar(name.as_ptr(), s) };
            Usr(utility::to_string(raw))
        })
    }

    /// Constructs a new `Usr` from an Objective-C method.
    pub fn from_objc_method<N: AsRef<str>>(class: &Usr, name: N, instance: bool) -> Usr {
        utility::with_string(&class.0, |s| {
            let name = utility::from_string(name);
            // libclang expects the instance flag as a C unsigned integer.
            let instance = instance as c_uint;
            unsafe {
                let raw = clang_constructUSR_ObjCMethod(name.as_ptr(), instance, s);
                Usr(utility::to_string(raw))
            }
        })
    }

    /// Constructs a new `Usr` from an Objective-C property.
    pub fn from_objc_property<N: AsRef<str>>(class: &Usr, name: N) -> Usr {
        utility::with_string(&class.0, |s| {
            let name = utility::from_string(name);
            let raw = unsafe { clang_constructUSR_ObjCProperty(name.as_ptr(), s) };
            Usr(utility::to_string(raw))
        })
    }

    /// Constructs a new `Usr` from an Objective-C protocol.
    pub fn from_objc_protocol<P: AsRef<str>>(protocol: P) -> Usr {
        let protocol = utility::from_string(protocol);
        let raw = unsafe { clang_constructUSR_ObjCProtocol(protocol.as_ptr()) };
        Usr(utility::to_string(raw))
    }
}
// Version _______________________________________

/// A version number in the form `x.y.z`.
///
/// Built from a raw `CXVersion` via `Version::from_raw`.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct Version {
/// The `x` component of the version number.
pub x: i32,
/// The `y` component of the version number.
pub y: i32,
/// The `z` component of the version number.
pub z: i32,
}
impl Version {
    //- Constructors -----------------------------

    /// Converts a raw `CXVersion` into a `Version`.
    fn from_raw(raw: CXVersion) -> Version {
        let x = raw.Major as i32;
        let y = raw.Minor as i32;
        let z = raw.Subminor as i32;
        Version { x, y, z }
    }
}
//================================================
// Functions
//================================================

/// Returns the version string for the version of `libclang` in use.
pub fn get_version() -> String {
    let raw = unsafe { clang_getClangVersion() };
    utility::to_string(raw)
}
Don't transmute unrecognized enum variants

Fixes undefined behavior triggered when a newer libclang reports a value with
no corresponding Rust enum variant: transmuting such a value into the enum is
UB, so unrecognized values are now mapped to `None` instead.
// Copyright 2016 Kyle Mayes
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! A somewhat idiomatic Rust wrapper for libclang.
//!
//! ## Supported Versions
//!
//! * 3.5 - [Documentation](https://kylemayes.github.io/clang-rs/3_5/clang)
//! * 3.6 - [Documentation](https://kylemayes.github.io/clang-rs/3_6/clang)
//! * 3.7 - [Documentation](https://kylemayes.github.io/clang-rs/3_7/clang)
//! * 3.8 - [Documentation](https://kylemayes.github.io/clang-rs/3_8/clang)
//! * 3.9 - [Documentation](https://kylemayes.github.io/clang-rs/3_9/clang)
//! * 4.0 - [Documentation](https://kylemayes.github.io/clang-rs/4_0/clang)
//! * 5.0 - [Documentation](https://kylemayes.github.io/clang-rs/5_0/clang)
//! * 6.0 - [Documentation](https://kylemayes.github.io/clang-rs/6_0/clang)
//! * 7.0 - [Documentation](https://kylemayes.github.io/clang-rs/7_0/clang)
//! * 8.0 - [Documentation](https://kylemayes.github.io/clang-rs/8_0/clang)
#![warn(missing_copy_implementations, missing_debug_implementations, missing_docs)]
#![allow(non_upper_case_globals)]
extern crate clang_sys;
extern crate libc;
#[macro_use]
mod utility;
pub mod completion;
pub mod diagnostic;
pub mod documentation;
pub mod source;
pub mod token;
pub mod sonar;
use std::cmp;
use std::fmt;
use std::hash;
use std::mem;
use std::ptr;
use std::slice;
use std::collections::{HashMap};
use std::ffi::{CString};
use std::marker::{PhantomData};
use std::path::{Path, PathBuf};
use std::sync::atomic::{self, AtomicBool};
use clang_sys::*;
use libc::{c_int, c_uint, c_ulong};
use completion::{Completer, CompletionString};
use diagnostic::{Diagnostic};
use documentation::{Comment};
use source::{File, Module, SourceLocation, SourceRange};
use token::{Token};
use utility::{FromError, Nullable};
mod error;
pub use self::error::*;
//================================================
// Enums
//================================================
// Accessibility _________________________________

/// Indicates the accessibility of a declaration or base class specifier.
///
/// The discriminants (1-3) are the raw values libclang reports; `from_raw`
/// below must be kept in sync with this set of variants.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum Accessibility {
/// The declaration or base class specifier is private.
Private = 3,
/// The declaration or base class specifier is protected.
Protected = 2,
/// The declaration or base class specifier is public.
Public = 1,
}
impl Accessibility {
    /// Converts a raw libclang value into an `Accessibility`, returning `None`
    /// for values without a corresponding variant (transmuting those would be
    /// undefined behavior).
    fn from_raw(raw: c_int) -> Option<Self> {
        if (1..=3).contains(&raw) {
            Some(unsafe { mem::transmute(raw) })
        } else {
            None
        }
    }
}
// Availability __________________________________

/// Indicates the availability of an AST entity.
///
/// The discriminants (0-3) are the raw values libclang reports; `from_raw`
/// below must be kept in sync with this set of variants.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum Availability {
/// The entity is available.
Available = 0,
/// The entity is available but has been deprecated and any usage of it will be a warning.
Deprecated = 1,
/// The entity is available but is not accessible and any usage of it will be an error.
Inaccessible = 3,
/// The entity is not available and any usage of it will be an error.
Unavailable = 2,
}
impl Availability {
    /// Converts a raw libclang value into an `Availability`, returning `None`
    /// for values without a corresponding variant (transmuting those would be
    /// undefined behavior).
    fn from_raw(raw: c_int) -> Option<Self> {
        if (0..=3).contains(&raw) {
            Some(unsafe { mem::transmute(raw) })
        } else {
            None
        }
    }
}
// CallingConvention _____________________________

/// Indicates the calling convention specified for a function type.
///
/// The discriminants mirror libclang's `CXCallingConv` values (1-15 plus 200
/// for `Unexposed`); `from_raw` below must be kept in sync with this set.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum CallingConvention {
/// The function type uses a calling convention that is not exposed via this interface.
Unexposed = 200,
/// The function type uses the x86 `cdecl` calling convention.
Cdecl = 1,
/// The function type uses the x86 `fastcall` calling convention.
Fastcall = 3,
/// The function type uses the x86 `pascal` calling convention.
Pascal = 5,
/// The function type uses the x86 `stdcall` calling convention.
Stdcall = 2,
/// The function type uses the x86 `thiscall` calling convention.
Thiscall = 4,
/// The function type uses the x86 `vectorcall` calling convention.
///
/// Only produced by `libclang` 3.6 and later.
Vectorcall = 12,
/// The function type uses the calling convention for the Swift programming language.
///
/// Only produced by `libclang` 3.9 and later.
Swift = 13,
/// The function type uses a calling convention that perserves most registers.
///
/// Only produced by `libclang` 3.9 and later.
PreserveMost = 14,
/// The function type uses a calling convention that preverses nearly all registers.
///
/// Only produced by `libclang` 3.9 and later.
PreserveAll = 15,
/// The function type uses the ARM AACPS calling convention.
Aapcs = 6,
/// The function type uses the ARM AACPS-VFP calling convention.
AapcsVfp = 7,
/// The function type uses the calling convention for Intel OpenCL built-ins.
IntelOcl = 9,
/// The function type uses a calling convention that passes as many values in registers as
/// possible.
///
/// Only produced by `libclang` 4.0 and later.
RegCall = 8,
/// The function type uses the x64 C calling convention as specified in the System V ABI.
SysV64 = 11,
/// The function type uses the x64 C calling convention as implemented on Windows.
Win64 = 10,
}
impl CallingConvention {
    /// Converts a raw libclang value into a `CallingConvention`, returning
    /// `None` for values without a corresponding variant (transmuting those
    /// would be undefined behavior). 1-15 and 200 (`Unexposed`) are the
    /// declared discriminants.
    fn from_raw(raw: c_int) -> Option<Self> {
        if (1..=15).contains(&raw) || raw == 200 {
            Some(unsafe { mem::transmute(raw) })
        } else {
            None
        }
    }
}
// EntityKind ____________________________________
/// Indicates the categorization of an AST entity.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum EntityKind {
// IMPORTANT: If you add variants, update the from_raw() code below.
/// A declaration whose specific type is not exposed via this interface.
UnexposedDecl = 1,
/// A C or C++ struct.
StructDecl = 2,
/// A C or C++ union.
UnionDecl = 3,
/// A C++ class.
ClassDecl = 4,
/// An enum.
EnumDecl = 5,
/// A C field or C++ non-static data member in a struct, union, or class.
FieldDecl = 6,
/// An enum constant.
EnumConstantDecl = 7,
/// A function.
FunctionDecl = 8,
/// A variable.
VarDecl = 9,
/// A parameter.
ParmDecl = 10,
/// An Objective-C `@interface`.
ObjCInterfaceDecl = 11,
/// An Objective-C `@interface` for a category.
ObjCCategoryDecl = 12,
/// An Objective-C `@protocol` declaration.
ObjCProtocolDecl = 13,
/// An Objective-C `@property` declaration.
ObjCPropertyDecl = 14,
/// An Objective-C instance variable.
ObjCIvarDecl = 15,
/// An Objective-C instance method.
ObjCInstanceMethodDecl = 16,
/// An Objective-C class method.
ObjCClassMethodDecl = 17,
/// An Objective-C `@implementation`.
ObjCImplementationDecl = 18,
/// An Objective-C `@implementation` for a category.
ObjCCategoryImplDecl = 19,
/// A typedef.
TypedefDecl = 20,
/// A C++ method.
Method = 21,
/// A C++ namespace.
Namespace = 22,
/// A linkage specification (e.g., `extern "C"`).
LinkageSpec = 23,
/// A C++ constructor.
Constructor = 24,
/// A C++ destructor.
Destructor = 25,
/// A C++ conversion function.
ConversionFunction = 26,
/// A C++ template type parameter.
TemplateTypeParameter = 27,
/// A C++ template non-type parameter.
NonTypeTemplateParameter = 28,
/// A C++ template template parameter.
TemplateTemplateParameter = 29,
/// A C++ function template.
FunctionTemplate = 30,
/// A C++ class template.
ClassTemplate = 31,
/// A C++ class template partial specialization.
ClassTemplatePartialSpecialization = 32,
/// A C++ namespace alias declaration.
NamespaceAlias = 33,
/// A C++ using directive.
UsingDirective = 34,
/// A C++ using declaration.
UsingDeclaration = 35,
/// A C++ type alias declaration.
TypeAliasDecl = 36,
/// An Objective-C `@synthesize` definition.
ObjCSynthesizeDecl = 37,
/// An Objective-C `@dynamic` definition.
ObjCDynamicDecl = 38,
/// An access specifier.
AccessSpecifier = 39,
/// A reference to a super class in Objective-C.
ObjCSuperClassRef = 40,
/// A reference to a protocol in Objective-C.
ObjCProtocolRef = 41,
/// A reference to a class in Objective-C.
ObjCClassRef = 42,
/// A reference to a type declaration.
TypeRef = 43,
/// A base class specifier.
BaseSpecifier = 44,
/// A reference to a class template, function template, template template parameter, or class
/// template partial specialization.
TemplateRef = 45,
/// A reference to a namespace or namespace alias.
NamespaceRef = 46,
/// A reference to a member of a struct, union, or class that occurs in some non-expression
/// context.
MemberRef = 47,
/// A reference to a labeled statement.
LabelRef = 48,
/// A reference to a set of overloaded functions or function templates that has not yet been
/// resolved to a specific function or function template.
OverloadedDeclRef = 49,
/// A reference to a variable that occurs in some non-expression context.
VariableRef = 50,
/// An expression whose specific kind is not exposed via this interface.
UnexposedExpr = 100,
/// An expression that refers to some value declaration, such as a function or enumerator.
DeclRefExpr = 101,
/// An expression that refers to the member of a struct, union, or class.
MemberRefExpr = 102,
/// An expression that calls a function.
CallExpr = 103,
/// An expression that sends a message to an Objective-C object or class.
ObjCMessageExpr = 104,
/// An expression that represents a block literal.
BlockExpr = 105,
/// An integer literal.
IntegerLiteral = 106,
/// A floating point number literal.
FloatingLiteral = 107,
/// An imaginary number literal.
ImaginaryLiteral = 108,
/// A string literal.
StringLiteral = 109,
/// A character literal.
CharacterLiteral = 110,
/// A parenthesized expression.
ParenExpr = 111,
/// Any unary expression other than `sizeof` and `alignof`.
UnaryOperator = 112,
/// An array subscript expression (`[C99 6.5.2.1]`).
ArraySubscriptExpr = 113,
/// A built-in binary expression (e.g., `x + y`).
BinaryOperator = 114,
/// A compound assignment expression (e.g., `x += y`).
CompoundAssignOperator = 115,
/// A ternary expression.
ConditionalOperator = 116,
/// An explicit cast in C or a C-style cast in C++.
CStyleCastExpr = 117,
/// A compound literal expression (`[C99 6.5.2.5]`).
CompoundLiteralExpr = 118,
/// A C or C++ initializer list.
InitListExpr = 119,
/// A GNU address of label expression.
AddrLabelExpr = 120,
/// A GNU statement expression.
StmtExpr = 121,
/// A C11 generic selection expression.
GenericSelectionExpr = 122,
/// A GNU `__null` expression.
GNUNullExpr = 123,
/// A C++ `static_cast<>` expression.
StaticCastExpr = 124,
/// A C++ `dynamic_cast<>` expression.
DynamicCastExpr = 125,
/// A C++ `reinterpret_cast<>` expression.
ReinterpretCastExpr = 126,
/// A C++ `const_cast<>` expression.
ConstCastExpr = 127,
/// A C++ cast that uses "function" notation (e.g., `int(0.5)`).
FunctionalCastExpr = 128,
/// A C++ `typeid` expression.
TypeidExpr = 129,
/// A C++ boolean literal.
BoolLiteralExpr = 130,
/// A C++ `nullptr` expression.
NullPtrLiteralExpr = 131,
/// A C++ `this` expression.
ThisExpr = 132,
/// A C++ `throw` expression.
ThrowExpr = 133,
/// A C++ `new` expression.
NewExpr = 134,
/// A C++ `delete` expression.
DeleteExpr = 135,
/// A unary expression.
UnaryExpr = 136,
/// An Objective-C string literal.
ObjCStringLiteral = 137,
/// An Objective-C `@encode` expression.
ObjCEncodeExpr = 138,
/// An Objective-C `@selector` expression.
ObjCSelectorExpr = 139,
/// An Objective-C `@protocol` expression.
ObjCProtocolExpr = 140,
/// An Objective-C bridged cast expression.
ObjCBridgedCastExpr = 141,
/// A C++11 parameter pack expansion expression.
PackExpansionExpr = 142,
/// A C++11 `sizeof...` expression.
SizeOfPackExpr = 143,
/// A C++11 lambda expression.
LambdaExpr = 144,
/// An Objective-C boolean literal.
ObjCBoolLiteralExpr = 145,
/// An Objective-C `self` expression.
ObjCSelfExpr = 146,
/// An OpenMP array section expression.
///
/// Only produced by `libclang` 3.8 and later.
OmpArraySectionExpr = 147,
/// An Objective-C availability check expression (e.g., `@available(macos 10.10, *)`).
///
/// Only produced by `libclang` 3.9 and later.
ObjCAvailabilityCheckExpr = 148,
/// A fixed-point literal.
///
/// Only produced by `libclang` 7.0 and later.
FixedPointLiteral = 149,
/// A statement whose specific kind is not exposed via this interface.
UnexposedStmt = 200,
/// A labelled statement in a function.
LabelStmt = 201,
/// A group of statements (e.g., a function body).
CompoundStmt = 202,
/// A `case` statement.
CaseStmt = 203,
/// A `default` statement.
DefaultStmt = 204,
/// An `if` statement.
IfStmt = 205,
/// A `switch` statement.
SwitchStmt = 206,
/// A `while` statement.
WhileStmt = 207,
/// A `do` statement.
DoStmt = 208,
/// A `for` statement.
ForStmt = 209,
/// A `goto` statement.
GotoStmt = 210,
/// An indirect `goto` statement.
IndirectGotoStmt = 211,
/// A `continue` statement.
ContinueStmt = 212,
/// A `break` statement.
BreakStmt = 213,
/// A `return` statement.
ReturnStmt = 214,
/// An inline assembly statement.
AsmStmt = 215,
/// An Objective-C `@try`-`@catch`-`@finally` statement.
ObjCAtTryStmt = 216,
/// An Objective-C `@catch` statement.
ObjCAtCatchStmt = 217,
/// An Objective-C `@finally` statement.
ObjCAtFinallyStmt = 218,
/// An Objective-C `@throw` statement.
ObjCAtThrowStmt = 219,
/// An Objective-C `@synchronized` statement.
ObjCAtSynchronizedStmt = 220,
/// An Objective-C autorelease pool statement.
ObjCAutoreleasePoolStmt = 221,
/// An Objective-C collection statement.
ObjCForCollectionStmt = 222,
/// A C++ catch statement.
CatchStmt = 223,
/// A C++ try statement.
TryStmt = 224,
/// A C++11 range-based for statement.
ForRangeStmt = 225,
/// A Windows Structured Exception Handling `__try` statement.
SehTryStmt = 226,
/// A Windows Structured Exception Handling `__except` statement.
SehExceptStmt = 227,
/// A Windows Structured Exception Handling `__finally` statement.
SehFinallyStmt = 228,
/// A Windows Structured Exception Handling `__leave` statement.
SehLeaveStmt = 247,
/// A Microsoft inline assembly statement.
MsAsmStmt = 229,
/// A null statement.
NullStmt = 230,
/// An adaptor for mixing declarations with statements and expressions.
DeclStmt = 231,
/// An OpenMP parallel directive.
OmpParallelDirective = 232,
/// An OpenMP SIMD directive.
OmpSimdDirective = 233,
/// An OpenMP for directive.
OmpForDirective = 234,
/// An OpenMP sections directive.
OmpSectionsDirective = 235,
/// An OpenMP section directive.
OmpSectionDirective = 236,
/// An OpenMP single directive.
OmpSingleDirective = 237,
/// An OpenMP parallel for directive.
OmpParallelForDirective = 238,
/// An OpenMP parallel sections directive.
OmpParallelSectionsDirective = 239,
/// An OpenMP task directive.
OmpTaskDirective = 240,
/// An OpenMP master directive.
OmpMasterDirective = 241,
/// An OpenMP critical directive.
OmpCriticalDirective = 242,
/// An OpenMP taskyield directive.
OmpTaskyieldDirective = 243,
/// An OpenMP barrier directive.
OmpBarrierDirective = 244,
/// An OpenMP taskwait directive.
OmpTaskwaitDirective = 245,
/// An OpenMP flush directive.
OmpFlushDirective = 246,
/// An OpenMP ordered directive.
///
/// Only produced by `libclang` 3.6 and later.
OmpOrderedDirective = 248,
/// An OpenMP atomic directive.
///
/// Only produced by `libclang` 3.6 and later.
OmpAtomicDirective = 249,
/// An OpenMP for SIMD directive.
///
/// Only produced by `libclang` 3.6 and later.
OmpForSimdDirective = 250,
/// An OpenMP parallel for SIMD directive.
///
/// Only produced by `libclang` 3.6 and later.
OmpParallelForSimdDirective = 251,
/// An OpenMP target directive.
///
/// Only produced by `libclang` 3.6 and later.
OmpTargetDirective = 252,
/// An OpenMP teams directive.
///
/// Only produced by `libclang` 3.6 and later.
OmpTeamsDirective = 253,
/// An OpenMP taskgroup directive.
///
/// Only produced by `libclang` 3.7 and later.
OmpTaskgroupDirective = 254,
/// An OpenMP cancellation point directive.
///
/// Only produced by `libclang` 3.7 and later.
OmpCancellationPointDirective = 255,
/// An OpenMP cancel directive.
///
/// Only produced by `libclang` 3.7 and later.
OmpCancelDirective = 256,
/// An OpenMP target data directive.
///
/// Only produced by `libclang` 3.8 and later.
OmpTargetDataDirective = 257,
/// An OpenMP task loop directive.
///
/// Only produced by `libclang` 3.8 and later.
OmpTaskLoopDirective = 258,
/// An OpenMP task loop SIMD directive.
///
/// Only produced by `libclang` 3.8 and later.
OmpTaskLoopSimdDirective = 259,
/// An OpenMP distribute directive.
///
/// Only produced by `libclang` 3.8 and later.
OmpDistributeDirective = 260,
/// An OpenMP target enter data directive.
///
/// Only produced by `libclang` 3.9 and later.
OmpTargetEnterDataDirective = 261,
/// An OpenMP target exit data directive.
///
/// Only produced by `libclang` 3.9 and later.
OmpTargetExitDataDirective = 262,
/// An OpenMP target parallel directive.
///
/// Only produced by `libclang` 3.9 and later.
OmpTargetParallelDirective = 263,
/// An OpenMP target parallel for directive.
///
/// Only produced by `libclang` 3.9 and later.
OmpTargetParallelForDirective = 264,
/// An OpenMP target update directive.
///
/// Only produced by `libclang` 3.9 and later.
OmpTargetUpdateDirective = 265,
/// An OpenMP distribute parallel for directive.
///
/// Only produced by `libclang` 3.9 and later.
OmpDistributeParallelForDirective = 266,
/// An OpenMP distribute parallel for SIMD directive.
///
/// Only produced by `libclang` 3.9 and later.
OmpDistributeParallelForSimdDirective = 267,
/// An OpenMP distribute SIMD directive.
///
/// Only produced by `libclang` 3.9 and later.
OmpDistributeSimdDirective = 268,
/// An OpenMP target parallel for SIMD directive.
///
/// Only produced by `libclang` 3.9 and later.
OmpTargetParallelForSimdDirective = 269,
/// An OpenMP target SIMD directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTargetSimdDirective = 270,
/// An OpenMP teams distribute directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTeamsDistributeDirective = 271,
/// An OpenMP teams distribute SIMD directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTeamsDistributeSimdDirective = 272,
/// An OpenMP teams distribute parallel for SIMD directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTeamsDistributeParallelForSimdDirective = 273,
/// An OpenMP teams distribute parallel for directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTeamsDistributeParallelForDirective = 274,
/// An OpenMP target teams directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTargetTeamsDirective = 275,
/// An OpenMP target teams distribute directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTargetTeamsDistributeDirective = 276,
/// An OpenMP target teams distribute parallel for directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTargetTeamsDistributeParallelForDirective = 277,
/// An OpenMP target teams distribute parallel for SIMD directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTargetTeamsDistributeParallelForSimdDirective = 278,
/// An OpenMP target teams distribute SIMD directive.
///
/// Only produced by `libclang` 4.0 and later.
OmpTargetTeamsDistributeSimdDirective = 279,
/// The top-level AST entity which acts as the root for the other entitys.
TranslationUnit = 300,
/// An attribute whose specific kind is not exposed via this interface.
UnexposedAttr = 400,
/// An attribute applied to an Objective-C IBAction.
IbActionAttr = 401,
/// An attribute applied to an Objective-C IBOutlet.
IbOutletAttr = 402,
/// An attribute applied to an Objective-C IBOutletCollection.
IbOutletCollectionAttr = 403,
/// The `final` attribute.
FinalAttr = 404,
/// The `override` attribute.
OverrideAttr = 405,
/// An annotation attribute.
AnnotateAttr = 406,
/// An ASM label attribute.
AsmLabelAttr = 407,
/// An attribute that requests for packed records (e.g., `__attribute__ ((__packed__))`).
PackedAttr = 408,
/// An attribute that asserts a function has no side effects (e.g., `__attribute__((pure))`).
PureAttr = 409,
/// The `const` attribute.
ConstAttr = 410,
/// An attribute that allows calls to a function to be duplicated by the optimized
/// (e.g., `__attribute__((noduplicate))`).
NoDuplicateAttr = 411,
/// A CUDA constant attribute.
CudaConstantAttr = 412,
/// A CUDA device attribute.
CudaDeviceAttr = 413,
/// A CUDA global attribute.
CudaGlobalAttr = 414,
/// A CUDA host attribute.
CudaHostAttr = 415,
/// A CUDA shared attribute.
///
/// Only produced by `libclang` 3.6 and later.
CudaSharedAttr = 416,
/// A linker visibility attribute.
///
/// Only produced by `libclang` 3.8 and later.
VisibilityAttr = 417,
/// A MSVC DLL export attribute.
///
/// Only produced by `libclang` 3.8 and later.
DllExport = 418,
/// A MSVC DLL import attribute.
///
/// Only produced by `libclang` 3.8 and later.
DllImport = 419,
/// `__attribute__((ns_returns_retained))`
///
/// Only produced by `libclang` 8.0 and later.
NSReturnsRetained = 420,
/// `__attribute__((ns_returns_not_retained))`
///
/// Only produced by `libclang` 8.0 and later.
NSReturnsNotRetained = 421,
/// `__attribute__((ns_returns_autoreleased))`
///
/// Only produced by `libclang` 8.0 and later.
NSReturnsAutoreleased = 422,
/// `__attribute__((ns_consumes_self))`
///
/// Only produced by `libclang` 8.0 and later.
NSConsumesSelf = 423,
/// `__attribute__((ns_consumed))`
///
/// Only produced by `libclang` 8.0 and later.
NSConsumed = 424,
/// `__attribute__((objc_exception))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCException = 425,
/// `__attribute__((NSObject))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCNSObject = 426,
/// `__attribute__((objc_independent_class))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCIndependentClass = 427,
/// `__attribute__((objc_precise_lifetime))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCPreciseLifetime = 428,
/// `__attribute__((objc_returns_inner_pointer))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCReturnsInnerPointer = 429,
/// `__attribute__((objc_requires_super))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCRequiresSuper = 430,
/// `__attribute__((objc_root_class))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCRootClass = 431,
/// `__attribute__((objc_subclassing_restricted))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCSubclassingRestricted = 432,
/// `__attribute__((objc_protocol_requires_explicit_implementation))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCExplicitProtocolImpl = 433,
/// `__attribute__((objc_designated_initializer))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCDesignatedInitializer = 434,
/// `__attribute__((objc_runtime_visible))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCRuntimeVisible = 435,
/// `__attribute__((objc_boxable))`
///
/// Only produced by `libclang` 8.0 and later.
ObjCBoxable = 436,
/// `__attribute__((flag_enum))`
///
/// Only produced by `libclang` 8.0 and later.
FlagEnum = 437,
/// A preprocessing directive.
PreprocessingDirective = 500,
/// A macro definition.
MacroDefinition = 501,
/// A macro expansion.
MacroExpansion = 502,
/// An inclusion directive.
InclusionDirective = 503,
/// A module import declaration.
ModuleImportDecl = 600,
/// A C++11 alias template declaration (e.g., `template <typename T> using M = std::map<T, T>`).
///
/// Only produced by `libclang` 3.8 and later.
TypeAliasTemplateDecl = 601,
/// A `static_assert` node.
///
/// Only produced by `libclang` 3.9 and later.
StaticAssert = 602,
/// A friend declaration.
///
/// Only produced by `libclang` 4.0 and later.
FriendDecl = 603,
/// A single overload in a set of overloads.
///
/// Only produced by `libclang` 3.7 and later.
OverloadCandidate = 700,
}
impl EntityKind {
    /// Converts a raw cursor kind from `libclang` into an `EntityKind`.
    ///
    /// Returns `None` when the raw value does not equal the discriminant of
    /// any known variant.
    fn from_raw(raw: c_int) -> Option<Self> {
        let known = (raw >= 1 && raw <= 50)
            || (raw >= 100 && raw <= 149)
            || (raw >= 200 && raw <= 279)
            || raw == 300
            || (raw >= 400 && raw <= 437)
            || (raw >= 500 && raw <= 503)
            || (raw >= 600 && raw <= 603)
            || raw == 700;
        if known {
            // SAFETY: `raw` was just checked to equal the discriminant of one
            // of the declared variants, so the transmute yields a valid value.
            Some(unsafe { mem::transmute(raw) })
        } else {
            None
        }
    }

    /// Converts a raw cursor kind, substituting `UnexposedDecl` for values
    /// that are not recognized.
    ///
    /// There is no perfect fallback when the variant isn't recognized and we
    /// can't signal this to the caller, but one of the unexposed variants is
    /// certainly reasonable.
    fn from_raw_infallible(raw: c_int) -> Self {
        Self::from_raw(raw).unwrap_or(EntityKind::UnexposedDecl)
    }
}
// EntityVisitResult _____________________________

/// Indicates how an entity visitation should proceed.
///
/// `#[repr(C)]` with explicit discriminants so the value can cross the
/// `libclang` FFI boundary unchanged (presumably mirroring
/// `CXChildVisitResult` — confirm against `clang-sys`).
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum EntityVisitResult {
    /// Do not continue visiting entities.
    Break = 0,
    /// Continue visiting sibling entities iteratively, skipping child entities.
    Continue = 1,
    /// Continue visiting sibling and child entities recursively, children first.
    Recurse = 2,
}
// EvaluationResult ______________________________

/// The result of evaluating an expression.
///
/// Produced by `Entity::evaluate`.
#[cfg(feature="gte_clang_3_9")]
#[derive(Clone, Debug, PartialEq)]
pub enum EvaluationResult {
    /// An evaluation result whose specific type is not exposed via this interface.
    Unexposed,
    /// A signed integer evaluation result.
    SignedInteger(i64),
    /// An unsigned integer evaluation result.
    ///
    /// Only produced by `libclang` 4.0 and later. Earlier versions will always return
    /// `SignedInteger` for integers.
    UnsignedInteger(u64),
    /// A floating point number evaluation result.
    Float(f64),
    /// A string literal evaluation result.
    String(CString),
    /// An Objective-C string literal evaluation result.
    ObjCString(CString),
    /// An Objective-C `CFString` evaluation result.
    CFString(CString),
    /// Any other evaluation result whose value can be represented by a string.
    Other(CString),
}
// ExceptionSpecification ________________________

/// Indicates the exception specification of a function.
///
/// Variants are declared in ascending discriminant order; the explicit
/// values are unchanged.
#[cfg(feature="gte_clang_5_0")]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum ExceptionSpecification {
    /// The function has a `throw()` specification.
    DynamicNone = 1,
    /// The function has a `throw(T1, T2)` specification.
    Dynamic = 2,
    /// The function has a `throw(...)` specification.
    DynamicAny = 3,
    /// The function has a basic `noexcept` specification.
    BasicNoexcept = 4,
    /// The function has a computed `noexcept` specification.
    ComputedNoexcept = 5,
    /// The function has an exception specification that has not yet been evaluated.
    Unevaluated = 6,
    /// The function has an exception specification that has not yet been instantiated.
    Uninstantiated = 7,
    /// The function has an exception specification that has not yet been parsed.
    Unparsed = 8,
}
#[cfg(feature="gte_clang_5_0")]
impl ExceptionSpecification {
    /// Converts a raw `libclang` value into an `ExceptionSpecification`,
    /// returning `None` for out-of-range values.
    fn from_raw(raw: c_int) -> Option<Self> {
        if raw >= 1 && raw <= 8 {
            // SAFETY: `raw` equals the discriminant of a declared variant.
            Some(unsafe { mem::transmute(raw) })
        } else {
            None
        }
    }
}
// Language ______________________________________

/// Indicates the language used by a declaration.
///
/// Variants are declared in ascending discriminant order; the explicit
/// values are unchanged.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum Language {
    /// The declaration uses the C programming language.
    C = 1,
    /// The declaration uses the Objective-C programming language.
    ObjectiveC = 2,
    /// The declaration uses the C++ programming language.
    Cpp = 3,
    /// The declaration uses the Swift programming language.
    ///
    /// Only produced by `libclang` 5.0 and later.
    Swift = 4,
}
impl Language {
    /// Converts a raw `libclang` language kind into a `Language`, returning
    /// `None` for out-of-range values.
    fn from_raw(raw: c_int) -> Option<Self> {
        if raw >= 1 && raw <= 4 {
            // SAFETY: `raw` equals the discriminant of a declared variant.
            Some(unsafe { mem::transmute(raw) })
        } else {
            None
        }
    }
}
// Linkage _______________________________________

/// Indicates the linkage of an AST entity.
///
/// Variants are declared in ascending discriminant order; the explicit
/// values are unchanged.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum Linkage {
    /// The AST entity has automatic storage (e.g., variables or parameters).
    Automatic = 1,
    /// The AST entity is a static variable or static function.
    Internal = 2,
    /// The AST entity has external linkage and lives in a C++ anonymous namespace.
    UniqueExternal = 3,
    /// The AST entity has external linkage.
    External = 4,
}
impl Linkage {
    /// Converts a raw `libclang` linkage kind into a `Linkage`, returning
    /// `None` for out-of-range values.
    fn from_raw(raw: c_int) -> Option<Self> {
        if raw >= 1 && raw <= 4 {
            // SAFETY: `raw` equals the discriminant of a declared variant.
            Some(unsafe { mem::transmute(raw) })
        } else {
            None
        }
    }
}
// MemoryUsage ___________________________________

/// Indicates the usage category of a quantity of memory.
///
/// Variants are declared in ascending discriminant order; the explicit
/// values are unchanged.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum MemoryUsage {
    /// Expressions, declarations, and types.
    Ast = 1,
    /// Identifiers.
    Identifiers = 2,
    /// Selectors.
    Selectors = 3,
    /// Cached global code completion results.
    GlobalCodeCompletionResults = 4,
    /// The content cache used by the source manager.
    SourceManagerContentCache = 5,
    /// Various tables used by the AST.
    AstSideTables = 6,
    /// Memory allocated with `malloc` for the source manager.
    SourceManagerMalloc = 7,
    /// Memory allocated with `mmap` for the source manager.
    SourceManagerMMap = 8,
    /// Memory allocated with `malloc` for external AST sources.
    ExternalAstSourceMalloc = 9,
    /// Memory allocated with `mmap` for external AST sources.
    ExternalAstSourceMMap = 10,
    /// Memory allocated with `malloc` for the preprocessor.
    Preprocessor = 11,
    /// The preprocessing record.
    PreprocessingRecord = 12,
    /// Data structures used by the source manager.
    SourceManagerDataStructures = 13,
    /// Header search tables.
    PreprocessorHeaderSearch = 14,
}
impl MemoryUsage {
    /// Converts a raw `libclang` memory-usage category into a `MemoryUsage`,
    /// returning `None` for out-of-range values.
    fn from_raw(raw: c_int) -> Option<Self> {
        if raw >= 1 && raw <= 14 {
            // SAFETY: `raw` equals the discriminant of a declared variant.
            Some(unsafe { mem::transmute(raw) })
        } else {
            None
        }
    }
}
// Nullability ___________________________________

/// Indicates the nullability of a pointer type.
///
/// `#[repr(C)]` with explicit discriminants matching the raw values consumed
/// by `from_raw`.
#[cfg(feature="gte_clang_8_0")]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum Nullability {
    /// Values of this type can never be null.
    NonNull = 0,
    /// Values of this type can be null.
    Nullable = 1,
    /// Whether values of this type can be null is (explicitly) unspecified.
    Unspecified = 2,
}
#[cfg(feature="gte_clang_8_0")]
impl Nullability {
    /// Converts a raw `libclang` nullability kind into a `Nullability`,
    /// returning `None` for out-of-range values.
    fn from_raw(raw: c_int) -> Option<Self> {
        if raw >= 0 && raw <= 2 {
            // SAFETY: `raw` equals the discriminant of a declared variant.
            Some(unsafe { mem::transmute(raw) })
        } else {
            None
        }
    }
}
// PrintingPolicyFlag ____________________________

/// Flags for the printing policy.
///
/// `#[repr(C)]` with explicit, contiguous discriminants (1-25) matching the
/// raw values consumed by `from_raw`.
#[cfg(feature="gte_clang_7_0")]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum PrintingPolicyFlag {
    /// Whether to suppress printing specifiers for a given type or declaration.
    SuppressSpecifiers = 1,
    /// Whether to suppress printing the tag keyword.
    SuppressTagKeyword = 2,
    /// Whether to include the body of a tag definition.
    IncludeTagDefinition = 3,
    /// Whether to suppress printing of scope specifiers.
    SuppressScope = 4,
    /// Whether to suppress printing the parts of scope specifiers that don't need to be written.
    SuppressUnwrittenScope = 5,
    /// Whether to suppress printing of variable initializers.
    SuppressInitializers = 6,
    /// Whether to print the size of constant array expressions as written.
    PrintConstantArraySizeAsWritten = 7,
    /// Whether to print the location of anonymous tags.
    PrintAnonymousTagLocations = 8,
    /// Whether to suppress printing the __strong lifetime qualifier in ARC.
    SuppressStrongLifetime = 9,
    /// Whether to suppress printing lifetime qualifiers in ARC.
    SuppressLifetimeQualifiers = 10,
    /// Whether to suppress printing template arguments in names of C++ constructors.
    SuppressTemplateArgsInCXXConstructors = 11,
    /// Whether to print 'bool' rather than '_Bool'.
    UseBool = 12,
    /// Whether to print 'restrict' rather than '__restrict'.
    UseRestrict = 13,
    /// Whether to print 'alignof' rather than '__alignof'.
    UseAlignof = 14,
    /// Whether to print '_Alignof' rather than '__alignof'.
    UseUnderscoreAlignof = 15,
    /// Whether to print '(void)' rather then '()' for a function prototype with zero parameters.
    UseVoidForZeroParams = 16,
    /// Whether to print terse output.
    UseTerseOutput = 17,
    /// Whether to do certain refinements needed for producing a proper declaration tag.
    PolishForDeclaration = 18,
    /// Whether to print 'half' rather than '__fp16'.
    UseHalf = 19,
    /// Whether to print the built-in wchar_t type as '__wchar_t'.
    UseMsWchar = 20,
    /// Whether to include newlines after statements.
    IncludeNewlines = 21,
    /// Whether to use whitespace and punctuation like MSVC does.
    UseMsvcFormatting = 22,
    /// Whether to print constant expressions as written.
    PrintConstantsAsWritten = 23,
    /// Whether to suppress printing the implicit 'self' or 'this' expressions.
    SuppressImplicitBase = 24,
    /// Whether to print the fully qualified name of function declarations.
    PrintFullyQualifiedName = 25,
}
#[cfg(feature="gte_clang_7_0")]
impl PrintingPolicyFlag {
    /// Converts a raw `libclang` printing-policy property into a
    /// `PrintingPolicyFlag`, returning `None` for out-of-range values.
    fn from_raw(raw: c_int) -> Option<Self> {
        if raw >= 1 && raw <= 25 {
            // SAFETY: `raw` equals the discriminant of a declared variant.
            Some(unsafe { mem::transmute(raw) })
        } else {
            None
        }
    }
}
// RefQualifier __________________________________

/// Indicates the ref qualifier of a C++ function or method type.
///
/// `#[repr(C)]` with explicit discriminants matching the raw values consumed
/// by `from_raw`.
#[cfg_attr(feature="cargo-clippy", allow(enum_variant_names))]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum RefQualifier {
    /// The function or method has an l-value ref qualifier (`&`).
    LValue = 1,
    /// The function or method has an r-value ref qualifier (`&&`).
    RValue = 2,
}
impl RefQualifier {
    /// Converts a raw `libclang` ref-qualifier kind into a `RefQualifier`,
    /// returning `None` for out-of-range values.
    fn from_raw(raw: c_int) -> Option<Self> {
        if raw >= 1 && raw <= 2 {
            // SAFETY: `raw` equals the discriminant of a declared variant.
            Some(unsafe { mem::transmute(raw) })
        } else {
            None
        }
    }
}
// StorageClass __________________________________

/// Indicates the storage class of a declaration.
///
/// Variants are declared in ascending discriminant order; the explicit
/// values are unchanged.
#[cfg(feature="gte_clang_3_6")]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum StorageClass {
    /// The declaration does not specify a storage duration and therefore has an automatic storage
    /// duration.
    None = 1,
    /// The declaration specifies a static storage duration and external linkage.
    Extern = 2,
    /// The declaration specifies a static storage duration and internal linkage.
    Static = 3,
    /// The declaration specifies a static storage duration and external linkage but is not
    /// accessible outside the containing translation unit.
    PrivateExtern = 4,
    /// The declaration specifies a storage duration related to an OpenCL work group.
    OpenClWorkGroupLocal = 5,
    /// The declaration specifies an automatic storage duration.
    Auto = 6,
    /// The declaration specifies an automatic storage duration and that it should be stored in a
    /// CPU register.
    Register = 7,
}
#[cfg(feature="gte_clang_3_6")]
impl StorageClass {
    /// Converts a raw `libclang` storage class into a `StorageClass`,
    /// returning `None` for out-of-range values.
    fn from_raw(raw: c_int) -> Option<Self> {
        if raw >= 1 && raw <= 7 {
            // SAFETY: `raw` equals the discriminant of a declared variant.
            Some(unsafe { mem::transmute(raw) })
        } else {
            None
        }
    }
}
// TemplateArgument ______________________________

/// An argument to a template function specialization.
///
/// Unlike the FFI-facing enums in this module, this type has no `#[repr(C)]`
/// or explicit discriminants; it is constructed on the Rust side.
#[cfg(feature="gte_clang_3_6")]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum TemplateArgument<'tu> {
    /// A declaration for a pointer, reference, or member pointer non-type template parameter.
    Declaration,
    /// An expression that has not yet been resolved.
    Expression,
    /// An empty template argument (e.g., one that has not yet been deduced).
    Null,
    /// A null pointer or null member pointer provided for a non-type template parameter.
    Nullptr,
    /// A parameter pack.
    Pack,
    /// A name for a template provided for a template template parameter.
    Template,
    /// A pack expansion of a name for a template provided for a template template parameter.
    TemplateExpansion,
    /// An integer.
    ///
    /// NOTE(review): carries both a signed and an unsigned reading of the
    /// value — presumably the signed and unsigned interpretations from
    /// `libclang`; confirm at the construction site.
    Integral(i64, u64),
    /// A type.
    Type(Type<'tu>),
}
// TlsKind _______________________________________

/// Indicates the thread-local storage (TLS) kind of a declaration.
///
/// `#[repr(C)]` with explicit discriminants matching the raw values consumed
/// by `from_raw`.
#[cfg(feature="gte_clang_6_0")]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum TlsKind {
    /// The declaration uses dynamic TLS.
    Dynamic = 1,
    /// The declaration uses static TLS.
    Static = 2,
}
#[cfg(feature="gte_clang_6_0")]
impl TlsKind {
    /// Converts a raw `libclang` TLS kind into a `TlsKind`, returning `None`
    /// for out-of-range values.
    fn from_raw(raw: c_int) -> Option<Self> {
        if raw >= 1 && raw <= 2 {
            // SAFETY: `raw` equals the discriminant of a declared variant.
            Some(unsafe { mem::transmute(raw) })
        } else {
            None
        }
    }
}
// TypeKind ______________________________________

/// Indicates the categorization of a type.
///
/// `#[repr(C)]` with explicit discriminants; they are neither contiguous nor
/// declared in numeric order (builtin kinds occupy 1-38, compound kinds
/// 100-175, with `Complex = 100`).
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum TypeKind {
    /// A type whose specific kind is not exposed via this interface.
    Unexposed = 1,
    /// `void`
    Void = 2,
    /// `bool` (C++) or `_Bool` (C99)
    Bool = 3,
    /// The `char` type when it is signed by default.
    CharS = 13,
    /// The `char` type when it is unsigned by default.
    CharU = 4,
    /// `signed char`
    SChar = 14,
    /// `unsigned char`
    UChar = 5,
    /// `wchar_t`
    WChar = 15,
    /// `char16_t`
    Char16 = 6,
    /// `char32_t`
    Char32 = 7,
    /// `short`
    Short = 16,
    /// `unsigned short`
    UShort = 8,
    /// `int`
    Int = 17,
    /// `unsigned int`
    UInt = 9,
    /// `long`
    Long = 18,
    /// `unsigned long`
    ULong = 10,
    /// `long long`
    LongLong = 19,
    /// `unsigned long long`
    ULongLong = 11,
    /// `__int128_t`
    Int128 = 20,
    /// `__uint128_t`
    UInt128 = 12,
    /// A half-precision (16-bit) floating point type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    Half = 31,
    /// A half-precision (16-bit) floating point type.
    ///
    /// Only produced by `libclang` 6.0 and later.
    Float16 = 32,
    /// `short _Accum`
    ///
    /// Only produced by `libclang` 7.0 and later.
    ShortAccum = 33,
    /// `_Accum`
    ///
    /// Only produced by `libclang` 7.0 and later.
    Accum = 34,
    /// `long _Accum`
    ///
    /// Only produced by `libclang` 7.0 and later.
    LongAccum = 35,
    /// `unsigned short _Accum`
    ///
    /// Only produced by `libclang` 7.0 and later.
    UShortAccum = 36,
    /// `unsigned _Accum`
    ///
    /// Only produced by `libclang` 7.0 and later.
    UAccum = 37,
    /// `unsigned long _Accum`
    ///
    /// Only produced by `libclang` 7.0 and later.
    ULongAccum = 38,
    /// `float`
    Float = 21,
    /// `double`
    Double = 22,
    /// `long double`
    LongDouble = 23,
    /// `nullptr_t` (C++11)
    Nullptr = 24,
    /// A C99 complex type (e.g., `_Complex float`).
    ///
    /// Note: this is the lowest compound-kind discriminant (100).
    Complex = 100,
    /// An unknown dependent type.
    Dependent = 26,
    /// The type of an unresolved overload set.
    Overload = 25,
    /// `id` (Objective-C)
    ObjCId = 27,
    /// `Class` (Objective-C)
    ObjCClass = 28,
    /// `SEL` (Objective-C)
    ObjCSel = 29,
    /// `__float128`
    ///
    /// Only produced by `libclang` 3.9 and later.
    Float128 = 30,
    /// An Objective-C interface type.
    ObjCInterface = 108,
    /// An Objective-C pointer to object type.
    ObjCObjectPointer = 109,
    /// A pointer type.
    Pointer = 101,
    /// A block pointer type (e.g., `void (^)(int)`).
    BlockPointer = 102,
    /// A pointer to a record member type.
    MemberPointer = 117,
    /// An l-value reference (e.g. `int&`).
    LValueReference = 103,
    /// An r-value reference (e.g. `int&&`).
    RValueReference = 104,
    /// An enum type.
    Enum = 106,
    /// A record type such as a struct or a class.
    Record = 105,
    /// A typedef.
    Typedef = 107,
    /// A function prototype with parameter type information (e.g., `void foo(int)`).
    FunctionPrototype = 111,
    /// A function prototype without parameter type information (e.g., `void foo()`).
    FunctionNoPrototype = 110,
    /// An array type with a specified size that is an integer constant expression.
    ConstantArray = 112,
    /// An array type with a specified size that is a dependent value.
    DependentSizedArray = 116,
    /// An array type without a specified size.
    IncompleteArray = 114,
    /// An array type with a specified size that is not an integer constant expression.
    VariableArray = 115,
    /// A GCC generic vector type.
    Vector = 113,
    /// A C++11 `decltype(auto)` type.
    ///
    /// Only produced by `libclang` 3.8 and later.
    Auto = 118,
    /// A type that was referred to using an elaborated type keyword (e.g., `struct S`).
    ///
    /// Only produced by `libclang` 3.9 and later.
    Elaborated = 119,
    /// An OpenCL pipe type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    Pipe = 120,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage1dRO = 121,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage1dArrayRO = 122,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage1dBufferRO = 123,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dRO = 124,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayRO = 125,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dDepthRO = 126,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayDepthRO = 127,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dMSAARO = 128,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayMSAARO = 129,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dMSAADepthRO = 130,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayMSAADepthRO = 131,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage3dRO = 132,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage1dWO = 133,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage1dArrayWO = 134,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage1dBufferWO = 135,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dWO = 136,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayWO = 137,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dDepthWO = 138,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayDepthWO = 139,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dMSAAWO = 140,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayMSAAWO = 141,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dMSAADepthWO = 142,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayMSAADepthWO = 143,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage3dWO = 144,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage1dRW = 145,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage1dArrayRW = 146,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage1dBufferRW = 147,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dRW = 148,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayRW = 149,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dDepthRW = 150,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayDepthRW = 151,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dMSAARW = 152,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayMSAARW = 153,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dMSAADepthRW = 154,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage2dArrayMSAADepthRW = 155,
    /// An OpenCL image type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLImage3dRW = 156,
    /// An OpenCL sampler type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLSampler = 157,
    /// An OpenCL event type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLEvent = 158,
    /// An OpenCL queue type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLQueue = 159,
    /// An OpenCL reserve ID type.
    ///
    /// Only produced by `libclang` 5.0 and later.
    OCLReserveID = 160,
    /// An Objective-C object type.
    ///
    /// Only produced by `libclang` 8.0 and later.
    ObjCObject = 161,
    /// An Objective-C type param.
    ///
    /// Only produced by `libclang` 8.0 and later.
    ObjCTypeParam = 162,
    /// An attributed type.
    ///
    /// Only produced by `libclang` 8.0 and later.
    Attributed = 163,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCMcePayload = 164,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCImePayload = 165,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCRefPayload = 166,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCSicPayload = 167,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCMceResult = 168,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCImeResult = 169,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCRefResult = 170,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCSicResult = 171,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCImeResultSingleRefStreamout = 172,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCImeResultDualRefStreamout = 173,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCImeSingleRefStreamin = 174,
    /// An Intel OpenCL extension type for the AVC VME media sampler in Intel graphics processors.
    ///
    /// Only produced by `libclang` 8.0 and later.
    OCLIntelSubgroupAVCImeDualRefStreamin = 175,
}
impl TypeKind {
    /// Converts a raw `libclang` type kind into a `TypeKind`.
    ///
    /// Returns `None` for values that do not correspond to a known variant.
    fn from_raw(raw: c_int) -> Option<Self> {
        match raw {
            // The compound-kind range must start at 100, not 101: the enum
            // declares `Complex = 100`, which the previous `101..=175` range
            // incorrectly rejected.
            1..=38 | 100..=175 => Some(unsafe { mem::transmute(raw) }),
            _ => None,
        }
    }

    /// Converts a raw type kind, substituting `Unexposed` for values that
    /// are not recognized.
    fn from_raw_infallible(raw: c_int) -> Self {
        Self::from_raw(raw).unwrap_or(TypeKind::Unexposed)
    }
}
// Visibility ____________________________________

/// Indicates the linker visibility of an AST element.
///
/// Variants are declared in ascending discriminant order; the explicit
/// values are unchanged.
#[cfg(feature="gte_clang_3_8")]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(C)]
pub enum Visibility {
    /// The AST element cannot be seen by the linker.
    Hidden = 1,
    /// The AST element can be seen by the linker but resolves to a symbol inside this object.
    Protected = 2,
    /// The AST element can be seen by the linker.
    Default = 3,
}
#[cfg(feature="gte_clang_3_8")]
impl Visibility {
    /// Converts a raw `libclang` visibility kind into a `Visibility`,
    /// returning `None` for out-of-range values.
    fn from_raw(raw: c_int) -> Option<Self> {
        if raw >= 1 && raw <= 3 {
            // SAFETY: `raw` equals the discriminant of a declared variant.
            Some(unsafe { mem::transmute(raw) })
        } else {
            None
        }
    }
}
//================================================
// Structs
//================================================
// Clang _________________________________________

// Tracks whether a `Clang` instance may currently be constructed: `true`
// while no instance is alive, flipped to `false` by `Clang::new` and back to
// `true` when the instance is dropped.
static AVAILABLE: AtomicBool = AtomicBool::new(true);

/// An empty type which prevents the use of this library from multiple threads simultaneously.
#[derive(Debug)]
pub struct Clang;
impl Clang {
    //- Constructors -----------------------------

    /// Constructs a new `Clang`.
    ///
    /// Only one instance of `Clang` is allowed at a time.
    ///
    /// # Failures
    ///
    /// * an instance of `Clang` already exists
    /// * a `libclang` shared library could not be found
    /// * a `libclang` shared library symbol could not be loaded
    #[cfg(feature="runtime")]
    pub fn new() -> Result<Clang, String> {
        if AVAILABLE.swap(false, atomic::Ordering::SeqCst) {
            load().map(|_| Clang).map_err(|error| {
                // If loading `libclang` failed, release the singleton slot so
                // a later call can retry; otherwise every subsequent call
                // would falsely report that an instance already exists.
                AVAILABLE.store(true, atomic::Ordering::SeqCst);
                error
            })
        } else {
            Err("an instance of `Clang` already exists".into())
        }
    }

    /// Constructs a new `Clang`.
    ///
    /// Only one instance of `Clang` is allowed at a time.
    ///
    /// # Failures
    ///
    /// * an instance of `Clang` already exists
    #[cfg(not(feature="runtime"))]
    pub fn new() -> Result<Clang, String> {
        if AVAILABLE.swap(false, atomic::Ordering::SeqCst) {
            Ok(Clang)
        } else {
            Err("an instance of `Clang` already exists".into())
        }
    }
}
#[cfg(feature="runtime")]
impl Drop for Clang {
    fn drop(&mut self) {
        // NOTE(review): `unwrap` panics if unloading `libclang` fails, and a
        // panic inside `drop` during unwinding aborts the process — confirm
        // `unload` cannot fail here in practice.
        unload().unwrap();
        // Allow a new `Clang` instance to be constructed.
        AVAILABLE.store(true, atomic::Ordering::SeqCst);
    }
}
#[cfg(not(feature="runtime"))]
impl Drop for Clang {
    fn drop(&mut self) {
        // Allow a new `Clang` instance to be constructed.
        AVAILABLE.store(true, atomic::Ordering::SeqCst);
    }
}
// CompilationDatabase ________________________________________

/// A compilation database of all information used to compile files in a project.
#[derive(Debug)]
pub struct CompilationDatabase {
    // Owned handle to the underlying libclang database; released in `Drop`.
    ptr: CXCompilationDatabase,
}
impl CompilationDatabase {
/// Creates a compilation database from the database found in the given directory.
pub fn from_directory<P: AsRef<Path>>(path: P) -> Result<CompilationDatabase, ()> {
let path = utility::from_path(path);
unsafe {
let mut error: CXCompilationDatabase_Error = mem::uninitialized();
let ptr = clang_CompilationDatabase_fromDirectory(path.as_ptr(), &mut error);
match error {
CXCompilationDatabase_NoError => Ok(CompilationDatabase { ptr }),
CXCompilationDatabase_CanNotLoadDatabase => Err(()),
_ => unreachable!(),
}
}
}
/// Get all the compile commands from the database.
pub fn get_all_compile_commands(&self) -> CompileCommands {
unsafe {
CompileCommands::from_ptr(clang_CompilationDatabase_getAllCompileCommands(self.ptr))
}
}
/// Find the compile commands for the given file.
pub fn get_compile_commands<P: AsRef<Path>>(&self, path: P) -> Result<CompileCommands, ()> {
// Presumably this returns null if we can't find the given path?
// The Clang docs don't specify.
let path = utility::from_path(path);
let ptr = unsafe { clang_CompilationDatabase_getCompileCommands(self.ptr, path.as_ptr()) };
ptr.map(CompileCommands::from_ptr).ok_or(())
}
}
impl Drop for CompilationDatabase {
    fn drop(&mut self) {
        // Release the underlying libclang compilation database handle.
        unsafe {
            clang_CompilationDatabase_dispose(self.ptr);
        }
    }
}
/// The result of a search in a CompilationDatabase
#[derive(Debug)]
pub struct CompileCommands {
    // Owned handle to the libclang command set; released in `Drop`.
    ptr: CXCompileCommands,
}
impl CompileCommands {
    // Wraps a non-null `CXCompileCommands` handle; panics on null.
    fn from_ptr(ptr: CXCompileCommands) -> CompileCommands {
        assert!(!ptr.is_null());
        CompileCommands { ptr }
    }

    /// Returns all commands for this search
    ///
    /// Each returned `CompileCommand` borrows from `self`, keeping the
    /// underlying libclang handle alive while the commands are in use.
    pub fn get_commands(&self) -> Vec<CompileCommand> {
        iter!(
            clang_CompileCommands_getSize(self.ptr),
            clang_CompileCommands_getCommand(self.ptr),
        )
        .map(|p| CompileCommand::from_ptr(self, p))
        .collect()
    }
}
impl Drop for CompileCommands {
    fn drop(&mut self) {
        // Release the underlying libclang compile command set.
        unsafe {
            clang_CompileCommands_dispose(self.ptr);
        }
    }
}
/// A compile command from CompilationDatabase
#[derive(Debug, Copy, Clone)]
pub struct CompileCommand<'cmds> {
    // Borrowed handle into the parent `CompileCommands` set.
    ptr: CXCompileCommand,
    // Ties this command's lifetime to the owning `CompileCommands`.
    _marker: PhantomData<&'cmds CompileCommands>,
}
impl<'cmds> CompileCommand<'cmds> {
    // Wraps a non-null `CXCompileCommand` borrowed from its parent set;
    // panics on null.
    fn from_ptr(_: &'cmds CompileCommands, ptr: CXCompileCommand) -> CompileCommand<'cmds> {
        assert!(!ptr.is_null());
        CompileCommand {
            ptr,
            _marker: PhantomData,
        }
    }

    /// Get the working directory where the command was executed.
    pub fn get_directory(&self) -> PathBuf {
        utility::to_path(unsafe { clang_CompileCommand_getDirectory(self.ptr) })
    }

    /// Get the filename associated with the command.
    #[cfg(feature="gte_clang_3_8")]
    pub fn get_filename(&self) -> PathBuf {
        utility::to_path(unsafe { clang_CompileCommand_getFilename(self.ptr) })
    }

    /// Get all arguments passed to the command.
    pub fn get_arguments(&self) -> Vec<String> {
        iter!(
            clang_CompileCommand_getNumArgs(self.ptr),
            clang_CompileCommand_getArg(self.ptr),
        )
        .map(utility::to_string)
        .collect()
    }

    // TODO: Args, mapped source path, mapped source context.
}
// Entity ________________________________________

/// An AST entity.
#[derive(Copy, Clone)]
pub struct Entity<'tu> {
    // The raw libclang cursor this entity wraps.
    raw: CXCursor,
    // The translation unit the cursor belongs to; bounds this entity's
    // lifetime.
    tu: &'tu TranslationUnit<'tu>,
}
impl<'tu> Entity<'tu> {
//- Constructors -----------------------------
// Wraps a raw cursor together with the translation unit that owns it.
fn from_raw(raw: CXCursor, tu: &'tu TranslationUnit<'tu>) -> Entity<'tu> {
    Entity { raw, tu }
}
//- Accessors --------------------------------
/// Evaluates this AST entity, if possible.
///
/// Returns `None` when `libclang` cannot evaluate the entity.
#[cfg(feature="gte_clang_3_9")]
pub fn evaluate(&self) -> Option<EvaluationResult> {
    // Copies the evaluation result's string payload into an owned `CString`.
    macro_rules! string {
        ($eval:expr) => {
            std::ffi::CStr::from_ptr(clang_EvalResult_getAsStr($eval)).to_owned()
        };
    }
    // libclang 4.0 added an unsigned-integer accessor; select the widest
    // available integer conversion at compile time.
    #[cfg(feature="gte_clang_4_0")]
    unsafe fn evaluate_integer(e: CXEvalResult) -> EvaluationResult {
        if clang_EvalResult_isUnsignedInt(e) != 0 {
            EvaluationResult::UnsignedInteger(clang_EvalResult_getAsUnsigned(e) as u64)
        } else {
            EvaluationResult::SignedInteger(clang_EvalResult_getAsLongLong(e) as i64)
        }
    }
    #[cfg(not(feature="gte_clang_4_0"))]
    unsafe fn evaluate_integer(e: CXEvalResult) -> EvaluationResult {
        EvaluationResult::SignedInteger(clang_EvalResult_getAsInt(e) as i64)
    }
    unsafe {
        // NOTE(review): `map` on the raw return of `clang_Cursor_Evaluate`
        // presumably comes from a crate-local nullable-pointer extension
        // trait mapping null to `None` — confirm in `utility`.
        clang_Cursor_Evaluate(self.raw).map(|e| {
            assert!(!e.is_null());
            let result = match clang_EvalResult_getKind(e) {
                CXEval_UnExposed => EvaluationResult::Unexposed,
                CXEval_Int => evaluate_integer(e),
                CXEval_Float => EvaluationResult::Float(clang_EvalResult_getAsDouble(e) as f64),
                CXEval_ObjCStrLiteral => EvaluationResult::ObjCString(string!(e)),
                CXEval_StrLiteral => EvaluationResult::String(string!(e)),
                CXEval_CFStr => EvaluationResult::CFString(string!(e)),
                CXEval_Other => EvaluationResult::Other(string!(e)),
                _ => panic!("unexpected eval result: {:?}", e),
            };
            // The strings above were copied out, so the result can be freed.
            clang_EvalResult_dispose(e);
            result
        })
    }
}
/// Returns the categorization of this AST entity.
///
/// Raw kinds not recognized by this crate fall back to an unexposed variant
/// (see `EntityKind::from_raw_infallible`).
pub fn get_kind(&self) -> EntityKind {
    EntityKind::from_raw_infallible(unsafe { clang_getCursorKind(self.raw) })
}
/// Returns the display name of this AST entity, if any.
///
/// The display name of an entity contains additional information that helps identify the
/// entity.
pub fn get_display_name(&self) -> Option<String> {
    unsafe { utility::to_string_option(clang_getCursorDisplayName(self.raw)) }
}
#[cfg(feature="gte_clang_7_0")]
/// Returns the pretty printer for this declaration.
///
/// The printer borrows this entity, so it cannot outlive it.
pub fn get_pretty_printer(&self) -> PrettyPrinter {
    unsafe { PrettyPrinter::from_raw(clang_getCursorPrintingPolicy(self.raw), self) }
}
/// Returns the source location of this AST entity, if any.
pub fn get_location(&self) -> Option<SourceLocation<'tu>> {
    unsafe {
        let location = clang_getCursorLocation(self.raw);
        location.map(|l| SourceLocation::from_raw(l, self.tu))
    }
}
/// Returns the source range of this AST entity, if any.
pub fn get_range(&self) -> Option<SourceRange<'tu>> {
    unsafe {
        let extent = clang_getCursorExtent(self.raw);
        extent.map(|r| SourceRange::from_raw(r, self.tu))
    }
}
/// Returns the accessibility of this declaration or base class specifier, if applicable.
pub fn get_accessibility(&self) -> Option<Accessibility> {
unsafe {
match clang_getCXXAccessSpecifier(self.raw) {
CX_CXXInvalidAccessSpecifier => None,
other => Accessibility::from_raw(other),
}
}
}
/// Returns the arguments of this function or method, if applicable.
pub fn get_arguments(&self) -> Option<Vec<Entity<'tu>>> {
    let arguments = iter_option!(
        clang_Cursor_getNumArguments(self.raw),
        clang_Cursor_getArgument(self.raw),
    );
    arguments.map(|i| i.map(|a| Entity::from_raw(a, self.tu)).collect())
}
/// Returns the availability of this AST entity.
pub fn get_availability(&self) -> Availability {
    let raw = unsafe { clang_getCursorAvailability(self.raw) };
    Availability::from_raw(raw).unwrap()
}
/// Returns the width of this bit field, if applicable.
///
/// `libclang` reports a negative width when the entity is not a bit field.
pub fn get_bit_field_width(&self) -> Option<usize> {
    let width = unsafe { clang_getFieldDeclBitWidth(self.raw) };
    if width < 0 {
        None
    } else {
        Some(width as usize)
    }
}
/// Returns the canonical entity for this AST entity.
///
/// In the C family of languages, some types of entities can be declared multiple times. When
/// there are multiple declarations of the same entity, only one will be considered canonical.
pub fn get_canonical_entity(&self) -> Entity<'tu> {
    let canonical = unsafe { clang_getCanonicalCursor(self.raw) };
    Entity::from_raw(canonical, self.tu)
}
/// Returns the comment associated with this AST entity, if any.
pub fn get_comment(&self) -> Option<String> {
    let text = unsafe { clang_Cursor_getRawCommentText(self.raw) };
    utility::to_string_option(text)
}
/// Returns the parsed comment associated with this declaration, if applicable.
pub fn get_parsed_comment(&self) -> Option<Comment<'tu>> {
    unsafe {
        let comment = clang_Cursor_getParsedComment(self.raw);
        comment.map(Comment::from_raw)
    }
}
/// Returns the brief of the comment associated with this AST entity, if any.
pub fn get_comment_brief(&self) -> Option<String> {
    let brief = unsafe { clang_Cursor_getBriefCommentText(self.raw) };
    utility::to_string_option(brief)
}
/// Returns the source range of the comment associated with this AST entity, if any.
pub fn get_comment_range(&self) -> Option<SourceRange<'tu>> {
    unsafe {
        let range = clang_Cursor_getCommentRange(self.raw);
        range.map(|r| SourceRange::from_raw(r, self.tu))
    }
}
/// Returns a completion string for this declaration or macro definition, if applicable.
pub fn get_completion_string(&self) -> Option<CompletionString> {
    unsafe {
        let string = clang_getCursorCompletionString(self.raw);
        string.map(CompletionString::from_ptr)
    }
}
/// Returns the child of this AST entity with the supplied index.
///
/// Children are visited in order; the walk stops as soon as the requested
/// index is reached.
pub fn get_child(&self, mut index: usize) -> Option<Entity<'tu>> {
    let mut child = None;
    self.visit_children(|entity, _| {
        if index > 0 {
            index -= 1;
            EntityVisitResult::Continue
        } else {
            child = Some(entity);
            EntityVisitResult::Break
        }
    });
    child
}
/// Returns the children of this AST entity.
pub fn get_children(&self) -> Vec<Entity<'tu>> {
    let mut children = Vec::new();
    self.visit_children(|entity, _| {
        children.push(entity);
        EntityVisitResult::Continue
    });
    children
}
/// Returns the AST entity that describes the definition of this AST entity, if any.
pub fn get_definition(&self) -> Option<Entity<'tu>> {
    let definition = unsafe { clang_getCursorDefinition(self.raw) };
    definition.map(|p| Entity::from_raw(p, self.tu))
}
/// Returns the value of this enum constant declaration, if applicable.
///
/// Both the signed and unsigned interpretations of the value are returned.
pub fn get_enum_constant_value(&self) -> Option<(i64, u64)> {
    if self.get_kind() != EntityKind::EnumConstantDecl {
        return None;
    }
    unsafe {
        let signed = clang_getEnumConstantDeclValue(self.raw);
        let unsigned = clang_getEnumConstantDeclUnsignedValue(self.raw);
        Some((signed, unsigned))
    }
}
/// Returns the underlying type of this enum declaration, if applicable.
pub fn get_enum_underlying_type(&self) -> Option<Type<'tu>> {
    unsafe {
        let underlying = clang_getEnumDeclIntegerType(self.raw);
        underlying.map(|t| Type::from_raw(t, self.tu))
    }
}
/// Returns the exception specification of this AST entity, if applicable.
#[cfg(feature="gte_clang_5_0")]
pub fn get_exception_specification(&self) -> Option<ExceptionSpecification> {
    unsafe {
        let raw = clang_getCursorExceptionSpecificationType(self.raw);
        // `-1` signals an error and `None` signals the absence of an
        // exception specification; both map to `None` here.
        if raw == -1 || raw == CXCursor_ExceptionSpecificationKind_None {
            None
        } else {
            ExceptionSpecification::from_raw(raw)
        }
    }
}
/// Returns the `external_source_symbol` attribute attached to this AST entity, if any.
#[cfg(feature="gte_clang_5_0")]
pub fn get_external_symbol(&self) -> Option<ExternalSymbol> {
    unsafe {
        // Use `MaybeUninit` rather than the deprecated (and UB-prone)
        // `mem::uninitialized`; the strings are only treated as initialized
        // after libclang reports that it actually wrote them.
        let mut language = mem::MaybeUninit::<CXString>::uninit();
        let mut defined = mem::MaybeUninit::<CXString>::uninit();
        let mut generated: c_uint = 0;
        let external = clang_Cursor_isExternalSymbol(
            self.raw,
            language.as_mut_ptr(),
            defined.as_mut_ptr(),
            &mut generated,
        );
        if external != 0 {
            Some(ExternalSymbol {
                language: utility::to_string(language.assume_init()),
                defined: utility::to_string(defined.assume_init()),
                generated: generated != 0
            })
        } else {
            None
        }
    }
}
/// Returns the file included by this inclusion directive, if applicable.
pub fn get_file(&self) -> Option<File<'tu>> {
    unsafe {
        let file = clang_getIncludedFile(self.raw);
        file.map(|f| File::from_ptr(f, self.tu))
    }
}
/// Returns the language used by this declaration, if applicable.
pub fn get_language(&self) -> Option<Language> {
unsafe {
match clang_getCursorLanguage(self.raw) {
CXLanguage_Invalid => None,
other => Language::from_raw(other),
}
}
}
/// Returns the lexical parent of this AST entity, if any.
pub fn get_lexical_parent(&self) -> Option<Entity<'tu>> {
    let parent = unsafe { clang_getCursorLexicalParent(self.raw) };
    parent.map(|p| Entity::from_raw(p, self.tu))
}
/// Returns the linkage of this AST entity, if any.
pub fn get_linkage(&self) -> Option<Linkage> {
unsafe {
match clang_getCursorLinkage(self.raw) {
CXLinkage_Invalid => None,
other => Linkage::from_raw(other),
}
}
}
/// Returns the mangled name of this AST entity, if any.
#[cfg(feature="gte_clang_3_6")]
pub fn get_mangled_name(&self) -> Option<String> {
    let mangling = unsafe { clang_Cursor_getMangling(self.raw) };
    utility::to_string_option(mangling)
}
/// Returns the mangled names of this C++ constructor or destructor, if applicable.
#[cfg(feature="gte_clang_3_8")]
pub fn get_mangled_names(&self) -> Option<Vec<String>> {
    unsafe {
        let manglings = clang_Cursor_getCXXManglings(self.raw);
        utility::to_string_set_option(manglings)
    }
}
/// Returns the mangled names of this Objective-C class interface or implementation, if applicable.
#[cfg(feature="gte_clang_6_0")]
pub fn get_mangled_objc_names(&self) -> Option<Vec<String>> {
    unsafe {
        let manglings = clang_Cursor_getObjCManglings(self.raw);
        utility::to_string_set_option(manglings)
    }
}
/// Returns the module imported by this module import declaration, if applicable.
pub fn get_module(&self) -> Option<Module<'tu>> {
    unsafe {
        let module = clang_Cursor_getModule(self.raw);
        module.map(|m| Module::from_ptr(m, self.tu))
    }
}
/// Returns the name of this AST entity, if any.
pub fn get_name(&self) -> Option<String> {
    let spelling = unsafe { clang_getCursorSpelling(self.raw) };
    utility::to_string_option(spelling)
}
/// Returns the source ranges of the name of this AST entity.
pub fn get_name_ranges(&self) -> Vec<SourceRange<'tu>> {
    unsafe {
        // Query spelling name ranges at increasing indices until libclang
        // reports either a null range or a range whose start location has
        // no associated file; both are treated as "no more ranges".
        (0..).map(|i| clang_Cursor_getSpellingNameRange(self.raw, i, 0)).take_while(|r| {
            if clang_Range_isNull(*r) != 0 {
                false
            } else {
                let range = clang_getRangeStart(*r);
                let mut file = ptr::null_mut();
                let null = ptr::null_mut();
                // Only the file out-parameter is inspected; the remaining
                // line/column/offset out-parameters are discarded.
                clang_getSpellingLocation(range, &mut file, null, null, null);
                !file.is_null()
            }
        }).map(|r| SourceRange::from_raw(r, self.tu)).collect()
    }
}
/// Returns which attributes were applied to this Objective-C property, if applicable.
pub fn get_objc_attributes(&self) -> Option<ObjCAttributes> {
    // The second argument to `clang_Cursor_getObjCPropertyAttributes` is
    // reserved; `0` is passed here as in the original call.
    let attributes = unsafe { clang_Cursor_getObjCPropertyAttributes(self.raw, 0) };
    if attributes == 0 {
        None
    } else {
        Some(ObjCAttributes::from(attributes))
    }
}
/// Returns the name of the method implementing the getter for this Objective-C property, if applicable
#[cfg(feature="gte_clang_8_0")]
pub fn get_objc_getter_name(&self) -> Option<String> {
    let name = unsafe { clang_Cursor_getObjCPropertyGetterName(self.raw) };
    utility::to_string_option(name)
}
/// Returns the element type for this Objective-C `iboutletcollection` attribute, if applicable.
pub fn get_objc_ib_outlet_collection_type(&self) -> Option<Type<'tu>> {
    unsafe {
        let collection = clang_getIBOutletCollectionType(self.raw);
        collection.map(|t| Type::from_raw(t, self.tu))
    }
}
/// Returns the type of the receiver of this Objective-C message, if applicable.
pub fn get_objc_receiver_type(&self) -> Option<Type<'tu>> {
    unsafe {
        let receiver = clang_Cursor_getReceiverType(self.raw);
        receiver.map(|t| Type::from_raw(t, self.tu))
    }
}
/// Returns the selector index for this Objective-C selector identifier, if applicable.
///
/// A negative index from `libclang` means the entity is not a selector
/// identifier.
pub fn get_objc_selector_index(&self) -> Option<usize> {
    let index = unsafe { clang_Cursor_getObjCSelectorIndex(self.raw) };
    if index < 0 {
        None
    } else {
        Some(index as usize)
    }
}
/// Returns the name of the method implementing the setter for this Objective-C property, if applicable
#[cfg(feature="gte_clang_8_0")]
pub fn get_objc_setter_name(&self) -> Option<String> {
    let name = unsafe { clang_Cursor_getObjCPropertySetterName(self.raw) };
    utility::to_string_option(name)
}
/// Returns the type encoding for this Objective-C declaration, if applicable.
pub fn get_objc_type_encoding(&self) -> Option<String> {
    let encoding = unsafe { clang_getDeclObjCTypeEncoding(self.raw) };
    utility::to_string_option(encoding)
}
/// Returns which qualifiers were applied to this Objective-C method return or parameter type,
/// if applicable.
pub fn get_objc_qualifiers(&self) -> Option<ObjCQualifiers> {
    let qualifiers = unsafe { clang_Cursor_getObjCDeclQualifiers(self.raw) };
    if qualifiers == 0 {
        None
    } else {
        Some(ObjCQualifiers::from(qualifiers))
    }
}
/// Returns the offset of this field, if applicable.
#[cfg(feature="gte_clang_3_7")]
pub fn get_offset_of_field(&self) -> Result<usize, OffsetofError> {
    let offset = unsafe { clang_Cursor_getOffsetOfField(self.raw) };
    OffsetofError::from_error(offset).map(|_| offset as usize)
}
/// Returns the overloaded declarations referenced by this overloaded declaration reference, if
/// applicable.
pub fn get_overloaded_declarations(&self) -> Option<Vec<Entity<'tu>>> {
    let declarations: Vec<_> = iter!(
        clang_getNumOverloadedDecls(self.raw),
        clang_getOverloadedDecl(self.raw),
    ).map(|e| Entity::from_raw(e, self.tu)).collect();
    if declarations.is_empty() {
        None
    } else {
        Some(declarations)
    }
}
/// Returns the methods that were overridden by this method, if applicable.
pub fn get_overridden_methods(&self) -> Option<Vec<Entity<'tu>>> {
    unsafe {
        // `clang_getOverriddenCursors` allocates a cursor array owned by
        // libclang and reports its length through `count`.
        let (mut raw, mut count) = (ptr::null_mut(), 0);
        clang_getOverriddenCursors(self.raw, &mut raw, &mut count);
        if !raw.is_null() {
            // Copy the cursors out of the libclang-owned buffer before it
            // is released below.
            let raws = slice::from_raw_parts(raw, count as usize);
            let methods = raws.iter().map(|e| Entity::from_raw(*e, self.tu)).collect();
            clang_disposeOverriddenCursors(raw);
            Some(methods)
        } else {
            None
        }
    }
}
/// Returns the availability of this declaration on the platforms where it is known, if
/// applicable.
pub fn get_platform_availability(&self) -> Option<Vec<PlatformAvailability>> {
if !self.is_declaration() {
return None;
}
unsafe {
let mut buffer: [CXPlatformAvailability; 32] = [CXPlatformAvailability::default(); 32];
let count = clang_getCursorPlatformAvailability(
self.raw,
ptr::null_mut(),
ptr::null_mut(),
ptr::null_mut(),
ptr::null_mut(),
(&mut buffer).as_mut_ptr(),
buffer.len() as c_int,
);
Some((0..count as usize).map(|i| PlatformAvailability::from_raw(buffer[i])).collect())
}
}
/// Returns the AST entity referred to by this AST entity, if any.
pub fn get_reference(&self) -> Option<Entity<'tu>> {
    let referenced = unsafe { clang_getCursorReferenced(self.raw) };
    referenced.map(|p| Entity::from_raw(p, self.tu))
}
/// Returns the semantic parent of this AST entity, if any.
pub fn get_semantic_parent(&self) -> Option<Entity<'tu>> {
    unsafe { clang_getCursorSemanticParent(self.raw) }.map(|p| Entity::from_raw(p, self.tu))
}
/// Returns the storage class of this declaration, if applicable.
#[cfg(feature="gte_clang_3_6")]
pub fn get_storage_class(&self) -> Option<StorageClass> {
    unsafe {
        let class = clang_Cursor_getStorageClass(self.raw);
        if class == CX_SC_Invalid {
            None
        } else {
            StorageClass::from_raw(class)
        }
    }
}
/// Returns the template declaration this template specialization was instantiated from, if
/// applicable.
pub fn get_template(&self) -> Option<Entity<'tu>> {
    unsafe { clang_getSpecializedCursorTemplate(self.raw) }.map(|p| Entity::from_raw(p, self.tu))
}
/// Returns the template arguments for this template function specialization, if applicable.
#[cfg(feature="gte_clang_3_6")]
pub fn get_template_arguments(&self) -> Option<Vec<TemplateArgument<'tu>>> {
    // Borrow the per-index accessor functions so they can be called from
    // inside the closure below.
    let get_type = &clang_Cursor_getTemplateArgumentType;
    let get_signed = &clang_Cursor_getTemplateArgumentValue;
    let get_unsigned = &clang_Cursor_getTemplateArgumentUnsignedValue;

    iter_option!(
        clang_Cursor_getNumTemplateArguments(self.raw),
        clang_Cursor_getTemplateArgumentKind(self.raw),
    ).map(|i| {
        i.enumerate().map(|(i, t)| {
            // Map each raw argument kind to the corresponding
            // `TemplateArgument` variant, fetching the payload (a type or
            // an integral value) for the kinds that carry one.
            match t {
                CXTemplateArgumentKind_Null => TemplateArgument::Null,
                CXTemplateArgumentKind_Type => {
                    let type_ = unsafe { get_type(self.raw, i as c_uint) };
                    TemplateArgument::Type(Type::from_raw(type_, self.tu))
                },
                CXTemplateArgumentKind_Declaration => TemplateArgument::Declaration,
                CXTemplateArgumentKind_NullPtr => TemplateArgument::Nullptr,
                CXTemplateArgumentKind_Integral => {
                    let signed = unsafe { get_signed(self.raw, i as c_uint) };
                    let unsigned = unsafe { get_unsigned(self.raw, i as c_uint) };
                    TemplateArgument::Integral(signed as i64, unsigned as u64)
                },
                CXTemplateArgumentKind_Template => TemplateArgument::Template,
                CXTemplateArgumentKind_TemplateExpansion => TemplateArgument::TemplateExpansion,
                CXTemplateArgumentKind_Expression => TemplateArgument::Expression,
                CXTemplateArgumentKind_Pack => TemplateArgument::Pack,
                // NOTE(review): no other kind values are expected from
                // libclang here.
                _ => unreachable!(),
            }
        }).collect()
    })
}
/// Returns the categorization of the template specialization that would result from
/// instantiating this template declaration, if applicable.
pub fn get_template_kind(&self) -> Option<EntityKind> {
unsafe {
match clang_getTemplateCursorKind(self.raw) {
CXCursor_NoDeclFound => None,
other => EntityKind::from_raw(other),
}
}
}
/// Returns the thread-local storage (TLS) kind of this declaration, if applicable.
#[cfg(feature="gte_clang_6_0")]
pub fn get_tls_kind(&self) -> Option<TlsKind> {
    unsafe {
        let kind = clang_getCursorTLSKind(self.raw);
        if kind == CXTLS_None {
            None
        } else {
            TlsKind::from_raw(kind)
        }
    }
}
/// Returns the translation unit which contains this AST entity.
pub fn get_translation_unit(&self) -> &'tu TranslationUnit<'tu> {
    // The translation unit reference is captured when the entity is
    // constructed, so this is a simple field access.
    self.tu
}
/// Returns the type of this AST entity, if any.
pub fn get_type(&self) -> Option<Type<'tu>> {
    unsafe {
        let type_ = clang_getCursorType(self.raw);
        type_.map(|t| Type::from_raw(t, self.tu))
    }
}
/// Returns the underlying type of this typedef declaration, if applicable.
pub fn get_typedef_underlying_type(&self) -> Option<Type<'tu>> {
    unsafe {
        let underlying = clang_getTypedefDeclUnderlyingType(self.raw);
        underlying.map(|t| Type::from_raw(t, self.tu))
    }
}
/// Returns the USR for this AST entity, if any.
pub fn get_usr(&self) -> Option<Usr> {
    let usr = unsafe { clang_getCursorUSR(self.raw) };
    utility::to_string_option(usr).map(Usr)
}
/// Returns the linker visibility for this AST entity, if any.
#[cfg(feature="gte_clang_3_8")]
pub fn get_visibility(&self) -> Option<Visibility> {
    unsafe {
        let visibility = clang_getCursorVisibility(self.raw);
        if visibility == CXVisibility_Invalid {
            None
        } else {
            Visibility::from_raw(visibility)
        }
    }
}
/// Returns the result type of this AST entity, if applicable.
pub fn get_result_type(&self) -> Option<Type<'tu>> {
    unsafe {
        let result = clang_getCursorResultType(self.raw);
        result.map(|t| Type::from_raw(t, self.tu))
    }
}
/// Returns whether this AST entity has any attached attributes.
#[cfg(feature="gte_clang_3_9")]
pub fn has_attributes(&self) -> bool {
    let result = unsafe { clang_Cursor_hasAttrs(self.raw) };
    result != 0
}
/// Returns whether this AST entity is an abstract C++ record.
#[cfg(feature="gte_clang_6_0")]
pub fn is_abstract_record(&self) -> bool {
    let result = unsafe { clang_CXXRecord_isAbstract(self.raw) };
    result != 0
}
/// Returns whether this AST entity is an anonymous record declaration.
#[cfg(feature="gte_clang_3_7")]
pub fn is_anonymous(&self) -> bool {
    let result = unsafe { clang_Cursor_isAnonymous(self.raw) };
    result != 0
}
/// Returns whether this AST entity is a bit field.
pub fn is_bit_field(&self) -> bool {
    let result = unsafe { clang_Cursor_isBitField(self.raw) };
    result != 0
}
/// Returns whether this AST entity is a builtin macro.
#[cfg(feature="gte_clang_3_9")]
pub fn is_builtin_macro(&self) -> bool {
    let result = unsafe { clang_Cursor_isMacroBuiltin(self.raw) };
    result != 0
}
/// Returns whether this AST entity is a C++ method declared `const`.
pub fn is_const_method(&self) -> bool {
    let result = unsafe { clang_CXXMethod_isConst(self.raw) };
    result != 0
}
/// Returns whether this AST entity is a C++ converting constructor.
#[cfg(feature="gte_clang_3_9")]
pub fn is_converting_constructor(&self) -> bool {
    let result = unsafe { clang_CXXConstructor_isConvertingConstructor(self.raw) };
    result != 0
}
/// Returns whether this AST entity is a C++ copy constructor.
#[cfg(feature="gte_clang_3_9")]
pub fn is_copy_constructor(&self) -> bool {
    let result = unsafe { clang_CXXConstructor_isCopyConstructor(self.raw) };
    result != 0
}
/// Returns whether this AST entity is a C++ default constructor.
#[cfg(feature="gte_clang_3_9")]
pub fn is_default_constructor(&self) -> bool {
    let result = unsafe { clang_CXXConstructor_isDefaultConstructor(self.raw) };
    result != 0
}
/// Returns whether this AST entity is a C++ defaulted constructor or method.
#[cfg(feature="gte_clang_3_9")]
pub fn is_defaulted(&self) -> bool {
    let result = unsafe { clang_CXXMethod_isDefaulted(self.raw) };
    result != 0
}
/// Returns whether this AST entity is a declaration and also the definition of that
/// declaration.
pub fn is_definition(&self) -> bool {
    let result = unsafe { clang_isCursorDefinition(self.raw) };
    result != 0
}
/// Returns whether this AST entity is a dynamic call.
///
/// A dynamic call is either a call to a C++ virtual method or an Objective-C message where the
/// receiver is an object instance, not `super` or a specific class.
pub fn is_dynamic_call(&self) -> bool {
    let result = unsafe { clang_Cursor_isDynamicCall(self.raw) };
    result != 0
}
/// Returns whether this AST entity is a function-like macro.
#[cfg(feature="gte_clang_3_9")]
pub fn is_function_like_macro(&self) -> bool {
    let result = unsafe { clang_Cursor_isMacroFunctionLike(self.raw) };
    result != 0
}
/// Returns whether this AST entity is an inline function.
#[cfg(feature="gte_clang_3_9")]
pub fn is_inline_function(&self) -> bool {
    let result = unsafe { clang_Cursor_isFunctionInlined(self.raw) };
    result != 0
}
/// Returns whether this AST entity is an invalid declaration.
#[cfg(feature="gte_clang_7_0")]
pub fn is_invalid_declaration(&self) -> bool {
    let result = unsafe { clang_isInvalidDeclaration(self.raw) };
    result != 0
}
/// Returns whether this AST entity is a C++ move constructor.
#[cfg(feature="gte_clang_3_9")]
pub fn is_move_constructor(&self) -> bool {
    unsafe { clang_CXXConstructor_isMoveConstructor(self.raw) != 0 }
}
#[cfg(feature="gte_clang_3_8")]
/// Returns whether this AST entity is a mutable field in a C++ struct or class.
pub fn is_mutable(&self) -> bool {
    let result = unsafe { clang_CXXField_isMutable(self.raw) };
    result != 0
}
/// Returns whether this AST entity is an Objective-C method or property declaration with the
/// `@optional` attribute applied to it.
pub fn is_objc_optional(&self) -> bool {
    let result = unsafe { clang_Cursor_isObjCOptional(self.raw) };
    result != 0
}
/// Returns whether this AST entity is a pure virtual method.
pub fn is_pure_virtual_method(&self) -> bool {
    let result = unsafe { clang_CXXMethod_isPureVirtual(self.raw) };
    result != 0
}
/// Returns whether this AST entity is a scoped enum.
#[cfg(feature="gte_clang_5_0")]
pub fn is_scoped(&self) -> bool {
    let result = unsafe { clang_EnumDecl_isScoped(self.raw) };
    result != 0
}
/// Returns whether this AST entity is a static method.
pub fn is_static_method(&self) -> bool {
    let result = unsafe { clang_CXXMethod_isStatic(self.raw) };
    result != 0
}
/// Returns whether this AST entity is a variadic function or method.
pub fn is_variadic(&self) -> bool {
    let result = unsafe { clang_Cursor_isVariadic(self.raw) };
    result != 0
}
/// Returns whether this AST entity is a virtual base class specifier.
pub fn is_virtual_base(&self) -> bool {
    let result = unsafe { clang_isVirtualBase(self.raw) };
    result != 0
}
/// Returns whether this AST entity is a virtual method.
pub fn is_virtual_method(&self) -> bool {
    let result = unsafe { clang_CXXMethod_isVirtual(self.raw) };
    result != 0
}
/// Visits the children of this AST entity recursively and returns whether visitation was ended
/// by the callback returning `EntityVisitResult::Break`.
///
/// The first argument of the callback is the AST entity being visited and the second argument
/// is the parent of that AST entity. The return value of the callback determines how visitation
/// will proceed.
pub fn visit_children<F: FnMut(Entity<'tu>, Entity<'tu>) -> EntityVisitResult>(
    &self, f: F
) -> bool {
    // Object-safe adapter trait so the caller's closure can be passed
    // through the C callback below as a type-erased trait object.
    trait EntityCallback<'tu> {
        fn call(&mut self, entity: Entity<'tu>, parent: Entity<'tu>) -> EntityVisitResult;
    }

    impl<'tu, F: FnMut(Entity<'tu>, Entity<'tu>) -> EntityVisitResult>
    EntityCallback<'tu> for F {
        fn call(&mut self, entity: Entity<'tu>, parent: Entity<'tu>) -> EntityVisitResult {
            self(entity, parent)
        }
    }

    // C-compatible trampoline invoked by libclang for each child cursor;
    // `data` carries the `(translation unit, callback)` pair across the
    // FFI boundary.
    extern fn visit(
        cursor: CXCursor, parent: CXCursor, data: CXClientData
    ) -> CXChildVisitResult {
        unsafe {
            let &mut (tu, ref mut callback) =
                &mut *(data as *mut (&TranslationUnit, Box<dyn EntityCallback>));
            let entity = Entity::from_raw(cursor, tu);
            let parent = Entity::from_raw(parent, tu);
            // `EntityVisitResult` variants map directly to the
            // `CXChildVisitResult` integer values expected by libclang.
            callback.call(entity, parent) as c_int
        }
    }

    let mut data = (self.tu, Box::new(f) as Box<dyn EntityCallback>);
    unsafe { clang_visitChildren(self.raw, visit, utility::addressof(&mut data)) != 0 }
}
//- Categorization ---------------------------

/// Returns whether this AST entity is categorized as an attribute.
pub fn is_attribute(&self) -> bool {
    let result = unsafe { clang_isAttribute(self.raw.kind) };
    result != 0
}
/// Returns whether this AST entity is categorized as a declaration.
pub fn is_declaration(&self) -> bool {
    let result = unsafe { clang_isDeclaration(self.raw.kind) };
    result != 0
}
/// Returns whether this AST entity is categorized as an expression.
pub fn is_expression(&self) -> bool {
    let result = unsafe { clang_isExpression(self.raw.kind) };
    result != 0
}
/// Returns whether this AST entity is categorized as a preprocessing entity.
pub fn is_preprocessing(&self) -> bool {
    let result = unsafe { clang_isPreprocessing(self.raw.kind) };
    result != 0
}
/// Returns whether this AST entity is categorized as a reference.
pub fn is_reference(&self) -> bool {
    let result = unsafe { clang_isReference(self.raw.kind) };
    result != 0
}
/// Returns whether this AST entity is categorized as a statement.
pub fn is_statement(&self) -> bool {
    let result = unsafe { clang_isStatement(self.raw.kind) };
    result != 0
}
/// Returns whether the categorization of this AST entity is unexposed.
pub fn is_unexposed(&self) -> bool {
    let result = unsafe { clang_isUnexposed(self.raw.kind) };
    result != 0
}
//- Location ---------------------------------

/// Returns whether this AST entity is in a main file.
pub fn is_in_main_file(&self) -> bool {
    match self.get_range() {
        Some(range) => range.is_in_main_file(),
        None => false,
    }
}
/// Returns whether this AST entity is in a system header.
pub fn is_in_system_header(&self) -> bool {
    match self.get_range() {
        Some(range) => range.is_in_system_header(),
        None => false,
    }
}
}
impl<'tu> fmt::Debug for Entity<'tu> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Entity")
            .field("kind", &self.get_kind())
            .field("display_name", &self.get_display_name())
            .field("location", &self.get_location())
            .finish()
    }
}
impl<'tu> cmp::PartialEq for Entity<'tu> {
    /// Two entities are equal when `libclang` considers their cursors equal.
    fn eq(&self, other: &Entity<'tu>) -> bool {
        let equal = unsafe { clang_equalCursors(self.raw, other.raw) };
        equal != 0
    }
}
// Equality is delegated to `clang_equalCursors` in the `PartialEq` impl.
impl<'tu> cmp::Eq for Entity<'tu> { }
impl<'tu> hash::Hash for Entity<'tu> {
    /// Hashes this AST entity using the value produced by `clang_hashCursor`.
    fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
        let integer = unsafe { clang_hashCursor(self.raw) };
        // Feed the hash value's native-endian bytes to the hasher; this is
        // byte-for-byte equivalent to the previous pointer-cast-and-slice
        // approach but avoids constructing a raw slice in `unsafe` code.
        hasher.write(&integer.to_ne_bytes());
    }
}
// ExternalSymbol ________________________________

/// An `external_source_symbol` attribute.
///
/// Returned by `Entity::get_external_symbol`.
#[cfg(feature="gte_clang_5_0")]
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct ExternalSymbol {
    /// The `language` string from this attribute.
    pub language: String,
    /// The `definedIn` string from this attribute.
    pub defined: String,
    /// Whether `generated_declaration` is set for this attribute.
    pub generated: bool,
}
// Index _________________________________________

/// A collection of translation units.
pub struct Index<'c> {
    // Owned `CXIndex` handle; released in the `Drop` impl.
    ptr: CXIndex,
    // Ties this index's lifetime to the `Clang` instance it was created from.
    _marker: PhantomData<&'c Clang>,
}
impl<'c> Index<'c> {
    //- Constructors -----------------------------

    /// Wraps a `CXIndex` pointer, which must be non-null.
    fn from_ptr(ptr: CXIndex) -> Index<'c> {
        assert!(!ptr.is_null());
        Index { ptr, _marker: PhantomData }
    }

    /// Constructs a new `Index`.
    ///
    /// `exclude` determines whether declarations from precompiled headers are excluded and
    /// `diagnostics` determines whether diagnostics are printed while parsing source files.
    pub fn new(_: &'c Clang, exclude: bool, diagnostics: bool) -> Index<'c> {
        let ptr = unsafe { clang_createIndex(exclude as c_int, diagnostics as c_int) };
        Index::from_ptr(ptr)
    }

    //- Accessors --------------------------------

    /// Returns a parser for the supplied file.
    pub fn parser<F: Into<PathBuf>>(&'c self, f: F) -> Parser<'c> {
        Parser::new(self, f)
    }

    /// Sets the invocation emission path for this index.
    #[cfg(feature="gte_clang_6_0")]
    pub fn set_invocation_emission_path<P: AsRef<Path>>(&'c self, path: P) {
        let path = utility::from_path(path);
        unsafe {
            clang_CXIndex_setInvocationEmissionPathOption(self.ptr, path.as_ptr());
        }
    }

    /// Returns the thread options for this index.
    pub fn get_thread_options(&self) -> ThreadOptions {
        let options = unsafe { clang_CXIndex_getGlobalOptions(self.ptr) };
        ThreadOptions::from(options)
    }

    //- Mutators ---------------------------------

    /// Sets the thread options for this index.
    pub fn set_thread_options(&mut self, options: ThreadOptions) {
        unsafe {
            clang_CXIndex_setGlobalOptions(self.ptr, options.into());
        }
    }
}
impl<'c> Drop for Index<'c> {
    fn drop(&mut self) {
        // Release the libclang index handle created by `clang_createIndex`.
        unsafe { clang_disposeIndex(self.ptr); }
    }
}
impl<'c> fmt::Debug for Index<'c> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Index")
            .field("thread_options", &self.get_thread_options())
            .finish()
    }
}
// ObjCAttributes ________________________________

options! {
    /// Indicates which attributes were applied to an Objective-C property.
    options ObjCAttributes: CXObjCPropertyAttrKind {
        /// Indicates use of the `readonly` attribute.
        pub readonly: CXObjCPropertyAttr_readonly,
        /// Indicates use of the `getter` attribute.
        pub getter: CXObjCPropertyAttr_getter,
        /// Indicates use of the `assign` attribute.
        pub assign: CXObjCPropertyAttr_assign,
        /// Indicates use of the `readwrite` attribute.
        pub readwrite: CXObjCPropertyAttr_readwrite,
        /// Indicates use of the `retain` attribute.
        pub retain: CXObjCPropertyAttr_retain,
        /// Indicates use of the `copy` attribute.
        pub copy: CXObjCPropertyAttr_copy,
        /// Indicates use of the `nonatomic` attribute.
        pub nonatomic: CXObjCPropertyAttr_nonatomic,
        /// Indicates use of the `setter` attribute.
        pub setter: CXObjCPropertyAttr_setter,
        /// Indicates use of the `atomic` attribute.
        pub atomic: CXObjCPropertyAttr_atomic,
        /// Indicates use of the `weak` attribute.
        pub weak: CXObjCPropertyAttr_weak,
        /// Indicates use of the `strong` attribute.
        pub strong: CXObjCPropertyAttr_strong,
        /// Indicates use of the `unsafe_unretained` attribute.
        pub unsafe_retained: CXObjCPropertyAttr_unsafe_unretained,
    }, objcattributes: #[feature="gte_clang_3_9"] {
        /// Indicates use of the `class` attribute.
        pub class: CXObjCPropertyAttr_class,
    }
}
// ObjCQualifiers ________________________________

options! {
    /// Indicates which qualifiers were applied to an Objective-C method return or parameter type.
    ///
    /// Mirrors the flags of `CXObjCDeclQualifierKind`.
    options ObjCQualifiers: CXObjCDeclQualifierKind {
        /// Indicates use of the `in` qualifier.
        pub in_: CXObjCDeclQualifier_In,
        /// Indicates use of the `inout` qualifier.
        pub inout: CXObjCDeclQualifier_Inout,
        /// Indicates use of the `out` qualifier.
        pub out: CXObjCDeclQualifier_Out,
        /// Indicates use of the `bycopy` qualifier.
        pub bycopy: CXObjCDeclQualifier_Bycopy,
        /// Indicates use of the `byref` qualifier.
        pub byref: CXObjCDeclQualifier_Byref,
        /// Indicates use of the `oneway` qualifier.
        pub oneway: CXObjCDeclQualifier_Oneway,
    }
}
// Parser ________________________________________

builder! {
    /// Parses translation units.
    ///
    /// Configure with the options below, then call `Parser::parse`.
    builder Parser: CXTranslationUnit_Flags {
        index: &'tu Index<'tu>,
        file: PathBuf,
        arguments: Vec<CString>,
        unsaved: Vec<Unsaved>;
    OPTIONS:
        /// Sets whether certain code completion results will be cached when the translation unit is
        /// reparsed.
        ///
        /// This option increases the time it takes to reparse the translation unit but improves
        /// code completion performance.
        pub cache_completion_results: CXTranslationUnit_CacheCompletionResults,
        /// Sets whether a detailed preprocessing record will be constructed which tracks all macro
        /// definitions and instantiations.
        pub detailed_preprocessing_record: CXTranslationUnit_DetailedPreprocessingRecord,
        /// Sets whether documentation comment briefs will be included in code completion results.
        pub briefs_in_completion_results: CXTranslationUnit_IncludeBriefCommentsInCodeCompletion,
        /// Sets whether the translation unit will be considered incomplete.
        ///
        /// This option suppresses certain semantic analyses and is typically used when parsing
        /// headers with the intent of creating a precompiled header.
        pub incomplete: CXTranslationUnit_Incomplete,
        /// Sets whether function and method bodies will be skipped.
        pub skip_function_bodies: CXTranslationUnit_SkipFunctionBodies,
        /// Sets whether processing will continue after a fatal error is encountered.
        #[cfg(feature="gte_clang_3_9")]
        pub keep_going: CXTranslationUnit_KeepGoing,
        /// Sets whether incremental processing will be used.
        #[cfg(feature="gte_clang_5_0")]
        pub single_file_parse: CXTranslationUnit_SingleFileParse,
        /// Sets whether function bodies will only be skipped in the preamble.
        ///
        /// Used in conjunction with `skip_function_bodies`.
        #[cfg(feature="gte_clang_7_0")]
        pub limit_skip_function_bodies_to_preamble: CXTranslationUnit_LimitSkipFunctionBodiesToPreamble,
        /// Sets whether attributed types should be included.
        #[cfg(feature="gte_clang_8_0")]
        pub include_attributed_types: CXTranslationUnit_IncludeAttributedTypes,
        /// Sets whether implicit attributes should be visited.
        #[cfg(feature="gte_clang_8_0")]
        pub visit_implicit_attributes: CXTranslationUnit_VisitImplicitAttributes,
    }
}
impl<'tu> Parser<'tu> {
    //- Constructors -----------------------------

    // Starts with no compiler arguments, no unsaved files, and no flags set.
    fn new<F: Into<PathBuf>>(index: &'tu Index<'tu>, file: F) -> Parser<'tu> {
        let flags: CXTranslationUnit_Flags = 0;
        Parser { index, file: file.into(), arguments: vec![], unsaved: vec![], flags }
    }

    //- Mutators ---------------------------------

    /// Sets the compiler arguments to provide to `libclang`.
    ///
    /// Any compiler argument that could be supplied to `clang` may be supplied to this
    /// function. However, the following arguments are ignored:
    ///
    /// * `-c`
    /// * `-emit-ast`
    /// * `-fsyntax-only`
    /// * `-o` and the following `<output>`
    pub fn arguments<S: AsRef<str>>(&mut self, arguments: &[S]) -> &mut Parser<'tu> {
        // Arguments are stored as owned `CString`s so the raw pointers
        // passed to libclang in `parse` remain valid.
        self.arguments = arguments.iter().map(utility::from_string).collect();
        self
    }

    /// Sets the unsaved files to use.
    pub fn unsaved(&mut self, unsaved: &[Unsaved]) -> &mut Parser<'tu> {
        self.unsaved = unsaved.into();
        self
    }

    //- Accessors --------------------------------

    /// Parses a translation unit.
    ///
    /// # Failures
    ///
    /// * an error occurs while deserializing an AST file
    /// * `libclang` crashes
    /// * an unknown error occurs
    pub fn parse(&self) -> Result<TranslationUnit<'tu>, SourceError> {
        // Collect raw views of the owned arguments and unsaved files; the
        // owning `self` outlives the FFI call below.
        let arguments = self.arguments.iter().map(|a| a.as_ptr()).collect::<Vec<_>>();
        let unsaved = self.unsaved.iter().map(|u| u.as_raw()).collect::<Vec<_>>();
        unsafe {
            let mut ptr = ptr::null_mut();
            let code = clang_parseTranslationUnit2(
                self.index.ptr,
                utility::from_path(&self.file).as_ptr(),
                arguments.as_ptr(),
                arguments.len() as c_int,
                unsaved.as_ptr() as *mut CXUnsavedFile,
                unsaved.len() as c_uint,
                self.flags,
                &mut ptr,
            );
            // A zero error code means `ptr` was populated with a valid
            // translation unit.
            SourceError::from_error(code).map(|_| TranslationUnit::from_ptr(ptr))
        }
    }
}
// PlatformAvailability __________________________

/// The availability of an AST entity on a particular platform.
///
/// This is the owned Rust counterpart of libclang's `CXPlatformAvailability`.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct PlatformAvailability {
    /// The name of the platform.
    pub platform: String,
    /// Whether the AST entity is unavailable on the platform.
    pub unavailable: bool,
    /// The version of the platform in which this AST entity was introduced, if any.
    pub introduced: Option<Version>,
    /// The version of the platform in which this AST entity was deprecated, if any.
    pub deprecated: Option<Version>,
    /// The version of the platform in which this AST entity was obsoleted, if any.
    pub obsoleted: Option<Version>,
    /// A message to display to users (e.g., replacement API suggestions).
    pub message: Option<String>,
}
impl PlatformAvailability {
    //- Constructors -----------------------------

    // Converts a raw `CXPlatformAvailability` into the owned representation.
    // Every field is copied out of `raw` first; the raw value is then released
    // with `clang_disposeCXPlatformAvailability`, so the caller must not use or
    // dispose it again.
    fn from_raw(mut raw: CXPlatformAvailability) -> PlatformAvailability {
        let availability = PlatformAvailability {
            platform: utility::to_string(raw.Platform),
            unavailable: raw.Unavailable != 0,
            introduced: raw.Introduced.map(Version::from_raw),
            deprecated: raw.Deprecated.map(Version::from_raw),
            obsoleted: raw.Obsoleted.map(Version::from_raw),
            message: utility::to_string_option(raw.Message),
        };
        // Dispose only after all fields have been copied into owned values.
        unsafe { clang_disposeCXPlatformAvailability(&mut raw); }
        availability
    }
}
// PrettyPrinter _________________________________

/// Pretty prints declarations.
#[cfg(feature="gte_clang_7_0")]
#[derive(Debug)]
pub struct PrettyPrinter<'e> {
    // Owned libclang printing-policy handle; released in `Drop`.
    ptr: CXPrintingPolicy,
    // The entity whose declaration is printed by `print`.
    entity: &'e Entity<'e>,
}
#[cfg(feature="gte_clang_7_0")]
impl<'e> PrettyPrinter<'e> {
    //- Constructors -----------------------------

    /// Wraps a printing policy handle for the supplied entity.
    ///
    /// The handle must be non-null; this is checked eagerly.
    fn from_raw(ptr: CXPrintingPolicy, entity: &'e Entity<'e>) -> Self {
        assert!(!ptr.is_null());
        PrettyPrinter { ptr, entity }
    }

    //- Accessors --------------------------------

    /// Gets the specified flag value.
    pub fn get_flag(&self, flag: PrintingPolicyFlag) -> bool {
        let raw = unsafe { clang_PrintingPolicy_getProperty(self.ptr, flag as c_int) };
        raw != 0
    }

    /// Sets the specified flag value.
    pub fn set_flag(&self, flag: PrintingPolicyFlag, value: bool) -> &Self {
        // libclang expects an integral boolean.
        let raw = if value { 1 } else { 0 };
        unsafe { clang_PrintingPolicy_setProperty(self.ptr, flag as c_int, raw); }
        self
    }

    /// Gets the number of spaces used to indent each line.
    pub fn get_indentation_amount(&self) -> u8 {
        let amount = unsafe {
            clang_PrintingPolicy_getProperty(self.ptr, CXPrintingPolicy_Indentation)
        };
        amount as u8
    }

    /// Sets the number of spaces used to indent each line.
    pub fn set_indentation_amount(&self, value: u8) -> &Self {
        unsafe {
            clang_PrintingPolicy_setProperty(self.ptr, CXPrintingPolicy_Indentation, value.into());
        }
        self
    }

    /// Pretty print the declaration.
    pub fn print(&self) -> String {
        let raw = unsafe { clang_getCursorPrettyPrinted(self.entity.raw, self.ptr) };
        utility::to_string(raw)
    }
}
#[cfg(feature="gte_clang_7_0")]
impl<'e> Drop for PrettyPrinter<'e> {
    fn drop(&mut self) {
        // Releases the printing-policy handle owned by this printer.
        unsafe { clang_PrintingPolicy_dispose(self.ptr) }
    }
}
// Target ________________________________________

/// Information about the target for a translation unit.
///
/// Constructed from libclang's `CXTargetInfo`.
#[cfg(feature="gte_clang_5_0")]
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Target {
    /// The normalized target triple for the target.
    pub triple: String,
    /// The width of a pointer in the target in bits.
    pub pointer_width: usize,
}
#[cfg(feature="gte_clang_5_0")]
impl Target {
    //- Constructors -----------------------------

    // Copies the triple and pointer width out of a raw `CXTargetInfo` and then
    // releases the raw value, which must not be used again by the caller.
    fn from_raw(raw: CXTargetInfo) -> Target {
        unsafe {
            let target = Target {
                triple: utility::to_string(clang_TargetInfo_getTriple(raw)),
                pointer_width: clang_TargetInfo_getPointerWidth(raw) as usize,
            };
            // Dispose only after both fields have been copied out.
            clang_TargetInfo_dispose(raw);
            target
        }
    }
}
// ThreadOptions _________________________________

options! {
    /// A set of options that determines which types of threads should use background priority.
    #[derive(Default)]
    options ThreadOptions: CXGlobalOptFlags {
        /// Indicates whether threads created for editing purposes should use background priority.
        pub editing: CXGlobalOpt_ThreadBackgroundPriorityForEditing,
        /// Indicates whether threads created for indexing purposes should use background priority.
        pub indexing: CXGlobalOpt_ThreadBackgroundPriorityForIndexing,
    }
}
// TranslationUnit _______________________________

/// A preprocessed and parsed source file.
pub struct TranslationUnit<'i> {
    // Owned libclang handle; released in `Drop`.
    ptr: CXTranslationUnit,
    // Ties this translation unit to the lifetime of the `Index` that produced it.
    _marker: PhantomData<&'i Index<'i>>,
}
impl<'i> TranslationUnit<'i> {
    //- Constructors -----------------------------

    // Wraps a non-null `CXTranslationUnit`; ownership of the handle is assumed
    // (it is released in `Drop`).
    fn from_ptr(ptr: CXTranslationUnit) -> TranslationUnit<'i> {
        assert!(!ptr.is_null());
        TranslationUnit { ptr, _marker: PhantomData }
    }

    /// Constructs a new `TranslationUnit` from an AST file.
    ///
    /// # Failures
    ///
    /// * an unknown error occurs
    pub fn from_ast<F: AsRef<Path>>(
        index: &'i Index, file: F
    ) -> Result<TranslationUnit<'i>, ()> {
        let path = utility::from_path(file);
        let ptr = unsafe { clang_createTranslationUnit(index.ptr, path.as_ptr()) };
        // Failure is reported by libclang as an absent handle, mapped to `Err(())`.
        ptr.map(TranslationUnit::from_ptr).ok_or(())
    }

    //- Accessors --------------------------------

    /// Returns the diagnostics for this translation unit.
    pub fn get_diagnostics(&'i self) -> Vec<Diagnostic<'i>> {
        iter!(clang_getNumDiagnostics(self.ptr), clang_getDiagnostic(self.ptr),).map(|d| {
            Diagnostic::from_ptr(d, self)
        }).collect()
    }

    /// Returns the entity for this translation unit.
    pub fn get_entity(&'i self) -> Entity<'i> {
        unsafe { Entity::from_raw(clang_getTranslationUnitCursor(self.ptr), self) }
    }

    /// Returns the file at the supplied path in this translation unit, if any.
    pub fn get_file<F: AsRef<Path>>(&'i self, file: F) -> Option<File<'i>> {
        let file = unsafe { clang_getFile(self.ptr, utility::from_path(file).as_ptr()) };
        file.map(|f| File::from_ptr(f, self))
    }

    /// Returns the memory usage of this translation unit.
    pub fn get_memory_usage(&self) -> HashMap<MemoryUsage, usize> {
        unsafe {
            let raw = clang_getCXTUResourceUsage(self.ptr);
            let raws = slice::from_raw_parts(raw.entries, raw.numEntries as usize);
            // Entries whose kind is not recognized by `MemoryUsage::from_raw`
            // are skipped (the `flat_map` drops the `None`s).
            let usage = raws
                .iter()
                .flat_map(|u| MemoryUsage::from_raw(u.kind).map(|kind| (kind, u.amount as usize)))
                .collect();
            // The resource usage list is owned by libclang and must be released
            // after the entries have been copied out.
            clang_disposeCXTUResourceUsage(raw);
            usage
        }
    }

    /// Returns the source ranges in this translation unit that were skipped by the preprocessor.
    ///
    /// This will always return an empty `Vec` if the translation unit was not constructed with a
    /// detailed preprocessing record.
    #[cfg(feature="gte_clang_4_0")]
    pub fn get_skipped_ranges(&'i self) -> Vec<SourceRange<'i>> {
        unsafe {
            let raw = clang_getAllSkippedRanges(self.ptr);
            let raws = slice::from_raw_parts((*raw).ranges, (*raw).count as usize);
            let ranges = raws.iter().map(|r| SourceRange::from_raw(*r, self)).collect();
            // The range list is owned by libclang; release it after copying.
            clang_disposeSourceRangeList(raw);
            ranges
        }
    }

    /// Returns information about the target for this translation unit.
    #[cfg(feature="gte_clang_5_0")]
    pub fn get_target(&self) -> Target {
        unsafe { Target::from_raw(clang_getTranslationUnitTargetInfo(self.ptr)) }
    }

    /// Returns the AST entities which correspond to the supplied tokens, if any.
    pub fn annotate(&'i self, tokens: &[Token<'i>]) -> Vec<Option<Entity<'i>>> {
        unsafe {
            // `clang_annotateTokens` writes one cursor per input token into `raws`.
            let mut raws = vec![CXCursor::default(); tokens.len()];
            // NOTE(review): this cast discards `const` on the token slice; it relies
            // on libclang not mutating the tokens during annotation — verify.
            let ptr = tokens.as_ptr() as *mut CXToken;
            clang_annotateTokens(self.ptr, ptr, tokens.len() as c_uint, raws.as_mut_ptr());
            raws.iter().map(|e| e.map(|e| Entity::from_raw(e, self))).collect()
        }
    }

    /// Returns a completer which runs code completion.
    pub fn completer<F: Into<PathBuf>>(&self, file: F, line: u32, column: u32) -> Completer {
        Completer::new(self, file, line, column)
    }

    /// Saves this translation unit to an AST file.
    ///
    /// # Failures
    ///
    /// * errors in the translation unit prevent saving
    /// * an unknown error occurs
    pub fn save<F: AsRef<Path>>(&self, file: F) -> Result<(), SaveError> {
        let file = utility::from_path(file);
        let flags = CXSaveTranslationUnit_None;
        let code = unsafe { clang_saveTranslationUnit(self.ptr, file.as_ptr(), flags) };
        SaveError::from_error(code)
    }

    //- Consumers --------------------------------

    /// Consumes this translation unit and reparses the source file it was created from with the
    /// same compiler arguments that were used originally.
    ///
    /// # Failures
    ///
    /// * an error occurs while deserializing an AST file
    /// * `libclang` crashes
    /// * an unknown error occurs
    pub fn reparse(self, unsaved: &[Unsaved]) -> Result<TranslationUnit<'i>, SourceError> {
        let unsaved = unsaved.iter().map(|u| u.as_raw()).collect::<Vec<_>>();
        unsafe {
            let code = clang_reparseTranslationUnit(
                self.ptr,
                unsaved.len() as c_uint,
                unsaved.as_ptr() as *mut CXUnsavedFile,
                CXReparse_None,
            );
            // On failure `self` is dropped here, which also releases the handle.
            SourceError::from_error(code).map(|_| self)
        }
    }
}
impl<'i> Drop for TranslationUnit<'i> {
    fn drop(&mut self) {
        // Releases the libclang translation unit owned by this value.
        unsafe { clang_disposeTranslationUnit(self.ptr); }
    }
}
impl<'i> fmt::Debug for TranslationUnit<'i> {
    /// Formats the translation unit as a struct showing its spelling.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        let spelling = utility::to_string(unsafe { clang_getTranslationUnitSpelling(self.ptr) });
        formatter
            .debug_struct("TranslationUnit")
            .field("spelling", &spelling)
            .finish()
    }
}
// Type __________________________________________

/// The type of an AST entity.
#[derive(Copy, Clone)]
pub struct Type<'tu> {
    // Raw libclang type value; small and cheap to copy.
    raw: CXType,
    // The translation unit this type belongs to.
    tu: &'tu TranslationUnit<'tu>,
}
impl<'tu> Type<'tu> {
    //- Constructors -----------------------------

    // Associates a raw `CXType` with its translation unit.
    fn from_raw(raw: CXType, tu: &'tu TranslationUnit<'tu>) -> Type<'tu> {
        Type { raw, tu }
    }

    //- Accessors --------------------------------

    /// Returns the kind of this type.
    pub fn get_kind(&self) -> TypeKind {
        TypeKind::from_raw_infallible(self.raw.kind)
    }

    /// Returns the display name of this type.
    pub fn get_display_name(&self) -> String {
        unsafe { utility::to_string(clang_getTypeSpelling(self.raw)) }
    }

    /// Returns the alignment of this type in bytes.
    ///
    /// # Failures
    ///
    /// * this type is a dependent type
    /// * this type is an incomplete type
    pub fn get_alignof(&self) -> Result<usize, AlignofError> {
        // Error conditions are encoded in the raw return value; `from_error`
        // decodes them, and a successful result is the alignment itself.
        let alignof_ = unsafe { clang_Type_getAlignOf(self.raw) };
        AlignofError::from_error(alignof_).map(|_| alignof_ as usize)
    }

    /// Returns the offset of the field with the supplied name in this record type in bits.
    ///
    /// # Failures
    ///
    /// * this record type is a dependent type
    /// * this record record type is an incomplete type
    /// * this record type does not contain a field with the supplied name
    pub fn get_offsetof<F: AsRef<str>>(&self, field: F) -> Result<usize, OffsetofError> {
        let field = utility::from_string(field);
        // As with `get_alignof`, error conditions are encoded in the raw value.
        let offsetof_ = unsafe { clang_Type_getOffsetOf(self.raw, field.as_ptr()) };
        OffsetofError::from_error(offsetof_).map(|_| offsetof_ as usize)
    }

    /// Returns the size of this type in bytes.
    ///
    /// # Failures
    ///
    /// * this type is a dependent type
    /// * this type is an incomplete type
    /// * this type is a variable size type
    pub fn get_sizeof(&self) -> Result<usize, SizeofError> {
        let sizeof_ = unsafe { clang_Type_getSizeOf(self.raw) };
        SizeofError::from_error(sizeof_).map(|_| sizeof_ as usize)
    }

    /// Returns the address space of this type.
    #[cfg(feature="gte_clang_5_0")]
    pub fn get_address_space(&self) -> usize {
        unsafe { clang_getAddressSpace(self.raw) as usize }
    }

    /// Returns the argument types for this function or method type, if applicable.
    pub fn get_argument_types(&self) -> Option<Vec<Type<'tu>>> {
        iter_option!(
            clang_getNumArgTypes(self.raw),
            clang_getArgType(self.raw),
        ).map(|i| i.map(|t| Type::from_raw(t, self.tu)).collect())
    }

    /// Returns the calling convention specified for this function type, if applicable.
    pub fn get_calling_convention(&self) -> Option<CallingConvention> {
        unsafe {
            match clang_getFunctionTypeCallingConv(self.raw) {
                // `Invalid` is what libclang reports for non-function types.
                CXCallingConv_Invalid => None,
                other => CallingConvention::from_raw(other),
            }
        }
    }

    /// Returns the canonical type for this type.
    ///
    /// The canonical type is the underlying type with all "sugar" removed (e.g., typedefs).
    pub fn get_canonical_type(&self) -> Type<'tu> {
        unsafe { Type::from_raw(clang_getCanonicalType(self.raw), self.tu) }
    }

    /// Returns the class type for this member pointer type, if applicable.
    pub fn get_class_type(&self) -> Option<Type<'tu>> {
        unsafe { clang_Type_getClassType(self.raw).map(|t| Type::from_raw(t, self.tu)) }
    }

    /// Returns the AST entity that declared this type, if any.
    pub fn get_declaration(&self) -> Option<Entity<'tu>> {
        unsafe { clang_getTypeDeclaration(self.raw).map(|e| Entity::from_raw(e, self.tu)) }
    }

    /// Returns the type named by this elaborated type, if applicable.
    #[cfg(feature="gte_clang_3_9")]
    pub fn get_elaborated_type(&self) -> Option<Type<'tu>> {
        unsafe { clang_Type_getNamedType(self.raw).map(|t| Type::from_raw(t, self.tu)) }
    }

    /// Returns the element type for this array, complex, or vector type, if applicable.
    pub fn get_element_type(&self) -> Option<Type<'tu>> {
        unsafe { clang_getElementType(self.raw).map(|t| Type::from_raw(t, self.tu)) }
    }

    /// Returns the exception specification of this type, if applicable.
    #[cfg(feature="gte_clang_5_0")]
    pub fn get_exception_specification(&self) -> Option<ExceptionSpecification> {
        unsafe {
            match clang_getExceptionSpecificationType(self.raw) {
                // -1 is returned when this is not a function type; `None` also
                // covers functions with no exception specification at all.
                -1 | CXCursor_ExceptionSpecificationKind_None => None,
                other => ExceptionSpecification::from_raw(other),
            }
        }
    }

    /// Returns the fields in this record type, if applicable.
    #[cfg(feature="gte_clang_3_7")]
    pub fn get_fields(&self) -> Option<Vec<Entity<'tu>>> {
        if self.get_kind() == TypeKind::Record {
            // Collect every field by visiting with a callback that never stops early.
            let mut fields = vec![];
            self.visit_fields(|e| {
                fields.push(e);
                true
            });
            Some(fields)
        } else {
            None
        }
    }

    /// Return the type that was modified by this attributed type.
    #[cfg(feature="gte_clang_8_0")]
    pub fn get_modified_type(&self) -> Option<Type<'tu>> {
        unsafe { clang_Type_getModifiedType(self.raw).map(|t| Type::from_raw(t, self.tu)) }
    }

    /// Returns the nullability of this pointer type, if applicable.
    #[cfg(feature="gte_clang_8_0")]
    pub fn get_nullability(&self) -> Option<Nullability> {
        unsafe {
            match clang_Type_getNullability(self.raw) {
                CXTypeNullability_Invalid => None,
                other => Nullability::from_raw(other),
            }
        }
    }

    /// Returns the encoding of this Objective-C type, if applicable.
    #[cfg(feature="gte_clang_3_9")]
    pub fn get_objc_encoding(&self) -> Option<String> {
        unsafe { utility::to_string_option(clang_Type_getObjCEncoding(self.raw)) }
    }

    /// Returns the base type of this Objective-C type, if applicable.
    #[cfg(feature="gte_clang_8_0")]
    pub fn get_objc_object_base_type(&self) -> Option<Type> {
        unsafe { clang_Type_getObjCObjectBaseType(self.raw).map(|t| Type::from_raw(t, self.tu)) }
    }

    /// Returns the declarations for all protocol references for this Objective-C type, if applicable.
    #[cfg(feature="gte_clang_8_0")]
    pub fn get_objc_protocol_declarations(&self) -> Vec<Entity<'tu>> {
        iter!(
            clang_Type_getNumObjCProtocolRefs(self.raw),
            clang_Type_getObjCProtocolDecl(self.raw),
        ).map(|c| Entity::from_raw(c, self.tu)).collect()
    }

    /// Returns the type arguments for this Objective-C type, if applicable.
    #[cfg(feature="gte_clang_8_0")]
    pub fn get_objc_type_arguments(&self) -> Vec<Type<'tu>> {
        iter!(
            clang_Type_getNumObjCTypeArgs(self.raw),
            clang_Type_getObjCTypeArg(self.raw),
        ).map(|t| Type::from_raw(t, self.tu)).collect()
    }

    /// Returns the pointee type for this pointer type, if applicable.
    pub fn get_pointee_type(&self) -> Option<Type<'tu>> {
        unsafe { clang_getPointeeType(self.raw).map(|t| Type::from_raw(t, self.tu)) }
    }

    /// Returns the ref qualifier for this C++ function or method type, if applicable.
    pub fn get_ref_qualifier(&self) -> Option<RefQualifier> {
        unsafe {
            match clang_Type_getCXXRefQualifier(self.raw) {
                CXRefQualifier_None => None,
                other => RefQualifier::from_raw(other),
            }
        }
    }

    /// Returns the result type for this function or method type, if applicable.
    pub fn get_result_type(&self) -> Option<Type<'tu>> {
        unsafe { clang_getResultType(self.raw).map(|t| Type::from_raw(t, self.tu)) }
    }

    /// Returns the size of this constant array or vector type, if applicable.
    pub fn get_size(&self) -> Option<usize> {
        // A negative count is libclang's signal that the type has no element count.
        let size = unsafe { clang_getNumElements(self.raw) };
        if size >= 0 {
            Some(size as usize)
        } else {
            None
        }
    }

    /// Returns the template argument types for this template class specialization type, if
    /// applicable.
    pub fn get_template_argument_types(&self) -> Option<Vec<Option<Type<'tu>>>> {
        iter_option!(
            clang_Type_getNumTemplateArguments(self.raw),
            clang_Type_getTemplateArgumentAsType(self.raw),
        ).map(|i| i.map(|t| t.map(|t| Type::from_raw(t, self.tu))).collect())
    }

    /// Returns the typedef name of this type, if applicable.
    #[cfg(feature="gte_clang_5_0")]
    pub fn get_typedef_name(&self) -> Option<String> {
        unsafe { utility::to_string_option(clang_getTypedefName(self.raw)) }
    }

    /// Returns whether this type is qualified with const.
    pub fn is_const_qualified(&self) -> bool {
        unsafe { clang_isConstQualifiedType(self.raw) != 0 }
    }

    /// Returns whether this type is an elaborated type, if it can be determined for certain.
    pub fn is_elaborated(&self) -> Option<bool> {
        // 119 is the raw value of `CXType_Elaborated`; the literal is used so this
        // method compiles even against libclang versions that predate the constant
        // (it was added in clang 3.9).
        if self.raw.kind == 119 {
            Some(true)
        } else if cfg!(feature="gte_clang_3_9") {
            // With clang 3.9+ the reported kind is authoritative.
            Some(false)
        } else {
            // Older libclang versions cannot report elaborated types, so a
            // different kind is inconclusive.
            None
        }
    }

    /// Returns whether this type is plain old data (POD).
    pub fn is_pod(&self) -> bool {
        unsafe { clang_isPODType(self.raw) != 0 }
    }

    /// Returns whether this type is qualified with restrict.
    pub fn is_restrict_qualified(&self) -> bool {
        unsafe { clang_isRestrictQualifiedType(self.raw) != 0 }
    }

    /// Returns whether this type is a transparent tag typedef.
    #[cfg(feature="gte_clang_5_0")]
    pub fn is_transparent_tag(&self) -> bool {
        unsafe { clang_Type_isTransparentTagTypedef(self.raw) != 0 }
    }

    /// Returns whether this type is a variadic function type.
    pub fn is_variadic(&self) -> bool {
        unsafe { clang_isFunctionTypeVariadic(self.raw) != 0 }
    }

    /// Returns whether this type is qualified with volatile.
    pub fn is_volatile_qualified(&self) -> bool {
        unsafe { clang_isVolatileQualifiedType(self.raw) != 0 }
    }

    /// Visits the fields in this record type, returning `None` if this type is not a record type
    /// and returning `Some(b)` otherwise where `b` indicates whether visitation was ended by the
    /// callback returning `false`.
    #[cfg(feature="gte_clang_3_7")]
    pub fn visit_fields<F: FnMut(Entity<'tu>) -> bool>(&self, f: F) -> Option<bool> {
        if self.get_kind() != TypeKind::Record {
            return None;
        }

        // Object-safe shim over the caller's closure so it can be carried through
        // the C callback as a boxed trait object.
        trait Callback<'tu> {
            fn call(&mut self, field: Entity<'tu>) -> bool;
        }

        impl<'tu, F: FnMut(Entity<'tu>) -> bool> Callback<'tu> for F {
            fn call(&mut self, field: Entity<'tu>) -> bool {
                self(field)
            }
        }

        // C trampoline: recovers the `(&TranslationUnit, Box<dyn Callback>)` pair
        // from the client-data pointer and forwards each field cursor to the
        // closure, translating its `bool` into libclang's continue/break result.
        extern fn visit(cursor: CXCursor, data: CXClientData) -> CXVisitorResult {
            unsafe {
                let &mut (tu, ref mut callback) =
                    &mut *(data as *mut (&TranslationUnit, Box<dyn Callback>));
                if callback.call(Entity::from_raw(cursor, tu)) {
                    CXVisit_Continue
                } else {
                    CXVisit_Break
                }
            }
        }

        let mut data = (self.tu, Box::new(f) as Box<dyn Callback>);
        unsafe {
            let data = utility::addressof(&mut data);
            // `Some(true)` means the callback stopped the visitation early.
            Some(clang_Type_visitFields(self.raw, visit, data) == CXVisit_Break)
        }
    }

    //- Categorization ---------------------------

    // The checks below rely on the numeric ordering of the `CXTypeKind` constants:
    // the builtin integer kinds form a contiguous run.

    /// Returns whether this type is an integer type.
    pub fn is_integer(&self) -> bool {
        self.raw.kind >= CXType_Bool && self.raw.kind <= CXType_Int128
    }

    /// Returns whether this type is a signed integer type.
    pub fn is_signed_integer(&self) -> bool {
        self.raw.kind >= CXType_Char_S && self.raw.kind <= CXType_Int128
    }

    /// Returns whether this type is an unsigned integer type.
    pub fn is_unsigned_integer(&self) -> bool {
        self.raw.kind >= CXType_Bool && self.raw.kind <= CXType_UInt128
    }
}
impl<'tu> fmt::Debug for Type<'tu> {
    /// Formats the type as a struct showing its kind and display name.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        let mut builder = formatter.debug_struct("Type");
        builder.field("kind", &self.get_kind());
        builder.field("display_name", &self.get_display_name());
        builder.finish()
    }
}
impl<'tu> cmp::PartialEq for Type<'tu> {
    /// Equality is delegated to `clang_equalTypes` rather than comparing the raw
    /// struct fields, so two handles to the same semantic type compare equal.
    fn eq(&self, other: &Type<'tu>) -> bool {
        unsafe { clang_equalTypes(self.raw, other.raw) != 0 }
    }
}

impl<'tu> cmp::Eq for Type<'tu> { }
// Unsaved _______________________________________

/// The path to and unsaved contents of a previously existing file.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Unsaved {
    // Both fields are stored as C strings so raw pointers into them can be
    // handed to libclang via `as_raw`.
    path: CString,
    contents: CString,
}
impl Unsaved {
    //- Constructors -----------------------------

    /// Constructs a new `Unsaved`.
    pub fn new<P: AsRef<Path>, C: AsRef<str>>(path: P, contents: C) -> Unsaved {
        Unsaved { path: utility::from_path(path), contents: utility::from_string(contents) }
    }

    //- Accessors --------------------------------

    // Builds the raw struct passed to libclang. The returned value borrows the
    // `CString`s owned by `self`, so it is only valid while `self` is alive.
    fn as_raw(&self) -> CXUnsavedFile {
        CXUnsavedFile {
            Filename: self.path.as_ptr(),
            Contents: self.contents.as_ptr(),
            Length: self.contents.as_bytes().len() as c_ulong,
        }
    }
}
// Usr ___________________________________________

/// A Unified Symbol Resolution (USR).
///
/// A USR identifies an AST entity and can be used to compare AST entities from different
/// translation units.
// Newtype over the USR string as produced by libclang's `clang_constructUSR_*` functions.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Usr(pub String);
impl Usr {
    //- Constructors -----------------------------

    /// Constructs a new `Usr` from an Objective-C category.
    pub fn from_objc_category<C: AsRef<str>>(class: C, category: C) -> Usr {
        let class = utility::from_string(class);
        let category = utility::from_string(category);
        let raw = unsafe { clang_constructUSR_ObjCCategory(class.as_ptr(), category.as_ptr()) };
        Usr(utility::to_string(raw))
    }

    /// Constructs a new `Usr` from an Objective-C class.
    pub fn from_objc_class<C: AsRef<str>>(class: C) -> Usr {
        let class = utility::from_string(class);
        unsafe { Usr(utility::to_string(clang_constructUSR_ObjCClass(class.as_ptr()))) }
    }

    /// Constructs a new `Usr` from an Objective-C instance variable.
    // The containing class's USR is passed to libclang as a temporary string
    // (`s`) scoped to the `with_string` closure.
    pub fn from_objc_ivar<N: AsRef<str>>(class: &Usr, name: N) -> Usr {
        utility::with_string(&class.0, |s| {
            let name = utility::from_string(name);
            unsafe { Usr(utility::to_string(clang_constructUSR_ObjCIvar(name.as_ptr(), s))) }
        })
    }

    /// Constructs a new `Usr` from an Objective-C method.
    ///
    /// `instance` selects an instance method (as opposed to a class method).
    pub fn from_objc_method<N: AsRef<str>>(class: &Usr, name: N, instance: bool) -> Usr {
        utility::with_string(&class.0, |s| {
            let name = utility::from_string(name);
            // libclang takes the instance/class distinction as an integral boolean.
            let instance = instance as c_uint;
            let raw = unsafe { clang_constructUSR_ObjCMethod(name.as_ptr(), instance, s) };
            Usr(utility::to_string(raw))
        })
    }

    /// Constructs a new `Usr` from an Objective-C property.
    pub fn from_objc_property<N: AsRef<str>>(class: &Usr, name: N) -> Usr {
        utility::with_string(&class.0, |s| {
            let name = utility::from_string(name);
            unsafe { Usr(utility::to_string(clang_constructUSR_ObjCProperty(name.as_ptr(), s))) }
        })
    }

    /// Constructs a new `Usr` from an Objective-C protocol.
    pub fn from_objc_protocol<P: AsRef<str>>(protocol: P) -> Usr {
        let string = utility::from_string(protocol);
        unsafe { Usr(utility::to_string(clang_constructUSR_ObjCProtocol(string.as_ptr()))) }
    }
}
// Version _______________________________________

/// A version number in the form `x.y.z`.
///
/// Constructed from libclang's `CXVersion`.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct Version {
    /// The `x` component of the version number.
    pub x: i32,
    /// The `y` component of the version number.
    pub y: i32,
    /// The `z` component of the version number.
    pub z: i32,
}
impl Version {
    //- Constructors -----------------------------

    // Copies the components out of a raw `CXVersion`.
    // NOTE(review): libclang documents absent components as -1; the raw values are
    // preserved as-is here, with no normalization — confirm callers expect that.
    fn from_raw(raw: CXVersion) -> Version {
        Version { x: raw.Major as i32, y: raw.Minor as i32, z: raw.Subminor as i32 }
    }
}
//================================================
// Functions
//================================================

/// Returns the version string for the version of `libclang` in use.
///
/// The exact format of the string is determined by `libclang` itself.
pub fn get_version() -> String {
    unsafe { utility::to_string(clang_getClangVersion()) }
}
|
#![crate_name = "rusoto"]
#![crate_type = "lib"]
#![cfg_attr(feature = "unstable", feature(proc_macro))]
#![cfg_attr(feature = "nightly-testing", feature(plugin))]
#![cfg_attr(feature = "nightly-testing", plugin(clippy))]
#![cfg_attr(feature = "nightly-testing", allow(cyclomatic_complexity, used_underscore_binding, ptr_arg, suspicious_else_formatting))]
#![allow(dead_code)]
#![cfg_attr(not(feature = "unstable"), deny(warnings))]
//! Rusoto is an [AWS](https://aws.amazon.com/) SDK for Rust.
//! A high level overview is available in `README.md` at https://github.com/rusoto/rusoto.
//!
//! # Example
//!
//! The following code shows a simple example of using Rusoto's DynamoDB API to
//! list the names of all tables in a database.
//!
//! ```rust,ignore
//! use std::default::Default;
//!
//! use rusoto::{DefaultCredentialsProvider, Region};
//! use rusoto::dynamodb::{DynamoDb, DynamoDbClient, ListTablesInput};
//!
//! let provider = DefaultCredentialsProvider::new().unwrap();
//! let client = DynamoDbClient::new(provider, Region::UsEast1);
//! let list_tables_input: ListTablesInput = Default::default();
//!
//! match client.list_tables(&list_tables_input) {
//! Ok(output) => {
//! match output.table_names {
//! Some(table_name_list) => {
//! println!("Tables in database:");
//!
//! for table_name in table_name_list {
//! println!("{}", table_name);
//! }
//! },
//! None => println!("No tables in database!"),
//! }
//! },
//! Err(error) => {
//! println!("Error: {:?}", error);
//! },
//! }
//! ```
extern crate rusoto_core;
pub use rusoto_core::*;
#[cfg(feature = "acm")]
pub extern crate rusoto_acm as acm;
#[cfg(feature = "autoscaling")]
pub extern crate rusoto_autoscaling as autoscaling;
#[cfg(feature = "cloudformation")]
pub extern crate rusoto_cloudformation as cloudformation;
#[cfg(feature = "cloudfront")]
pub extern crate rusoto_cloudfront as cloudfront;
#[cfg(feature = "cloudhsm")]
pub extern crate rusoto_cloudhsm as cloudhsm;
#[cfg(feature = "cloudsearch")]
pub extern crate rusoto_cloudsearch as cloudsearch;
#[cfg(feature = "cloudtrail")]
pub extern crate rusoto_cloudtrail as cloudtrail;
#[cfg(feature = "cloudwatch")]
pub extern crate rusoto_cloudwatch as cloudwatch;
#[cfg(feature = "codecommit")]
pub extern crate rusoto_codecommit as codecommit;
#[cfg(feature = "codedeploy")]
pub extern crate rusoto_codedeploy as codedeploy;
#[cfg(feature = "codepipeline")]
pub extern crate rusoto_codepipeline as codepipeline;
#[cfg(feature = "cognito-identity")]
pub extern crate rusoto_cognito_identity as cognitoidentity;
#[cfg(feature = "config")]
pub extern crate rusoto_config as config;
#[cfg(feature = "datapipeline")]
pub extern crate rusoto_datapipeline as datapipeline;
#[cfg(feature = "devicefarm")]
pub extern crate rusoto_devicefarm as devicefarm;
#[cfg(feature = "directconnect")]
pub extern crate rusoto_directconnect as directconnect;
#[cfg(feature = "ds")]
pub extern crate rusoto_ds as ds;
#[cfg(feature = "dynamodb")]
pub extern crate rusoto_dynamodb as dynamodb;
#[cfg(feature = "dynamodbstreams")]
pub extern crate rusoto_dynamodbstreams as dynamodbstreams;
#[cfg(feature = "ec2")]
pub extern crate rusoto_ec2 as ec2;
#[cfg(feature = "ecr")]
pub extern crate rusoto_ecr as ecr;
#[cfg(feature = "ecs")]
pub extern crate rusoto_ecs as ecs;
#[cfg(feature = "emr")]
pub extern crate rusoto_emr as emr;
#[cfg(feature = "elasticache")]
pub extern crate rusoto_elasticache as elasticache;
#[cfg(feature = "elasticbeanstalk")]
pub extern crate rusoto_elasticbeanstalk as elasticbeanstalk;
#[cfg(feature = "elastictranscoder")]
pub extern crate rusoto_elastictranscoder as elastictranscoder;
#[cfg(feature = "elb")]
pub extern crate rusoto_elb as elb;
#[cfg(feature = "elbv2")]
pub extern crate rusoto_elbv2 as elbv2;
#[cfg(feature = "events")]
pub extern crate rusoto_events as events;
#[cfg(feature = "firehose")]
pub extern crate rusoto_firehose as firehose;
#[cfg(feature = "iam")]
pub extern crate rusoto_iam as iam;
#[cfg(feature = "importexport")]
pub extern crate rusoto_importexport as importexport;
#[cfg(feature = "inspector")]
pub extern crate rusoto_inspector as inspector;
#[cfg(feature = "iot")]
pub extern crate rusoto_iot as iot;
#[cfg(feature = "kinesis")]
pub extern crate rusoto_kinesis as kinesis;
#[cfg(feature = "kms")]
pub extern crate rusoto_kms as kms;
#[cfg(feature = "lambda")]
pub extern crate rusoto_lambda as lambda;
#[cfg(feature = "logs")]
pub extern crate rusoto_logs as logs;
#[cfg(feature = "machinelearning")]
pub extern crate rusoto_machinelearning as machinelearning;
#[cfg(feature = "marketplacecommerceanalytics")]
pub extern crate rusoto_marketplacecommerceanalytics as marketplacecommerceanalytics;
#[cfg(feature = "opsworks")]
pub extern crate rusoto_opsworks as opsworks;
#[cfg(feature = "redshift")]
pub extern crate rusoto_redshift as redshift;
#[cfg(feature = "rds")]
pub extern crate rusoto_rds as rds;
#[cfg(feature = "route53")]
pub extern crate rusoto_route53 as route53;
#[cfg(feature = "route53domains")]
pub extern crate rusoto_route53domains as route53domains;
#[cfg(feature = "s3")]
pub extern crate rusoto_s3 as s3;
#[cfg(feature = "sdb")]
pub extern crate rusoto_sdb as sdb;
#[cfg(feature = "ses")]
pub extern crate rusoto_ses as ses;
#[cfg(feature = "sns")]
pub extern crate rusoto_sns as sns;
#[cfg(feature = "sqs")]
pub extern crate rusoto_sqs as sqs;
#[cfg(feature = "ssm")]
pub extern crate rusoto_ssm as ssm;
#[cfg(feature = "storagegateway")]
pub extern crate rusoto_storagegateway as storagegateway;
#[cfg(feature = "sts")]
pub extern crate rusoto_sts as sts;
#[cfg(feature = "swf")]
pub extern crate rusoto_swf as swf;
#[cfg(feature = "waf")]
pub extern crate rusoto_waf as waf;
#[cfg(feature = "workspaces")]
pub extern crate rusoto_workspaces as workspaces;
/*
#[cfg(feature = "gamelift")]
pub extern crate rusoto_gamelift as gamelift;
#[cfg(feature = "support")]
pub extern crate rusoto_support as support;
*/
// NOTE: stray commit-message text ("Remove lib.rs") — not valid Rust; commented out.
|
#[macro_use]
extern crate nom;
pub mod parser;
use std::result;
/// A SemVer Version
#[derive(PartialEq,Debug)]
pub struct Version {
    // Numeric components of the `major.minor.patch` triple.
    major: u32,
    minor: u32,
    patch: u32,
    // Optional pre-release identifier (the part after `-`), e.g. `alpha1`.
    pre: Option<String>,
    // Optional build metadata (the part after `+`), e.g. `build5`.
    build: Option<String>,
}
/// An error type for this crate
///
/// Currently, just a generic error. Will make this nicer later.
// NOTE(review): this enum is private but is exposed through the public `Result`
// alias below (private-in-public); consider making it `pub` so callers can name
// and match on the error type.
#[derive(PartialEq,Debug)]
enum SemVerError {
    GenericError,
}

/// A Result type for errors
pub type Result<T> = result::Result<T, SemVerError>;
impl From<()> for SemVerError {
    /// Maps the parser's unit error into the crate's single generic error,
    /// allowing `try!` to convert it automatically.
    fn from(_: ()) -> Self {
        SemVerError::GenericError
    }
}
impl Version {
    /// Create a Version from a string
    ///
    /// Currently supported: x, x.y, and x.y.z versions.
    pub fn parse(version: &str) -> Result<Version> {
        // Leading/trailing whitespace is tolerated; the nom-based parser consumes
        // bytes, and its `()` error is converted via the `From<()>` impl for
        // `SemVerError` defined in this file.
        Ok(try!(parser::try_parse(version.trim().as_bytes())))
    }
}
#[cfg(test)]
mod tests {
use super::Version;
use super::SemVerError;
#[test]
fn test_parse() {
assert_eq!(Version::parse(""), Err(SemVerError::GenericError));
assert_eq!(Version::parse(" "), Err(SemVerError::GenericError));
//assert_eq!(Version::parse("1"), Err(SemVerError::GenericError));
//assert_eq!(Version::parse("1.2"), Err(SemVerError::GenericError));
//assert_eq!(Version::parse("1.2.3-"), Err(SemVerError::GenericError));
assert_eq!(Version::parse("a.b.c"), Err(SemVerError::GenericError));
//assert_eq!(Version::parse("1.2.3 abc"), Err(SemVerError::GenericError));
assert!(Version::parse("1.2.3") == Ok(Version {
major: 1,
minor: 2,
patch: 3,
pre: None,
build: None,
}));
assert!(Version::parse(" 1.2.3 ") == Ok(Version {
major: 1,
minor: 2,
patch: 3,
pre: None,
build: None,
}));
assert!(Version::parse("1.2.3-alpha1") == Ok(Version {
major: 1,
minor: 2,
patch: 3,
pre: Some(String::from("alpha1")),
build: None,
}));
assert!(Version::parse(" 1.2.3-alpha1 ") == Ok(Version {
major: 1,
minor: 2,
patch: 3,
pre: Some(String::from("alpha1")),
build: None
}));
assert!(Version::parse("1.2.3+build5") == Ok(Version {
major: 1,
minor: 2,
patch: 3,
pre: None,
build: Some(String::from("build5")),
}));
assert!(Version::parse(" 1.2.3+build5 ") == Ok(Version {
major: 1,
minor: 2,
patch: 3,
pre: None,
build: Some(String::from("build5")),
}));
assert!(Version::parse("1.2.3-alpha1+build5") == Ok(Version {
major: 1,
minor: 2,
patch: 3,
pre: Some(String::from("alpha1")),
build: Some(String::from("build5")),
}));
assert!(Version::parse(" 1.2.3-alpha1+build5 ") == Ok(Version {
major: 1,
minor: 2,
patch: 3,
pre: Some(String::from("alpha1")),
build: Some(String::from("build5")),
}));
assert!(Version::parse("1.2.3-1.alpha1.9+build5.7.3aedf ") == Ok(Version {
major: 1,
minor: 2,
patch: 3,
pre: Some(String::from("1.alpha1.9")),
build: Some(String::from("build5.7.3aedf")),
}));
assert_eq!(Version::parse("0.4.0-beta.1+0851523"), Ok(Version {
major: 0,
minor: 4,
patch: 0,
pre: Some(String::from("beta.1")),
build: Some(String::from("0851523")),
}));
}
/*
#[test]
fn test_increment_patch() {
let mut buggy_release = Version::parse("0.1.0").unwrap();
buggy_release.increment_patch();
assert_eq!(buggy_release, Version::parse("0.1.1").unwrap());
}
#[test]
fn test_increment_minor() {
let mut feature_release = Version::parse("1.4.6").unwrap();
feature_release.increment_minor();
assert_eq!(feature_release, Version::parse("1.5.0").unwrap());
}
#[test]
fn test_increment_major() {
let mut chrome_release = Version::parse("46.1.246773").unwrap();
chrome_release.increment_major();
assert_eq!(chrome_release, Version::parse("47.0.0").unwrap());
}
#[test]
fn test_increment_keep_prerelease() {
let mut release = Version::parse("1.0.0-alpha").unwrap();
release.increment_patch();
assert_eq!(release, Version::parse("1.0.1").unwrap());
release.increment_minor();
assert_eq!(release, Version::parse("1.1.0").unwrap());
release.increment_major();
assert_eq!(release, Version::parse("2.0.0").unwrap());
}
#[test]
fn test_increment_clear_metadata() {
let mut release = Version::parse("1.0.0+4442").unwrap();
release.increment_patch();
assert_eq!(release, Version::parse("1.0.1").unwrap());
release = Version::parse("1.0.1+hello").unwrap();
release.increment_minor();
assert_eq!(release, Version::parse("1.1.0").unwrap());
release = Version::parse("1.1.3747+hello").unwrap();
release.increment_major();
assert_eq!(release, Version::parse("2.0.0").unwrap());
}
#[test]
fn test_eq() {
assert_eq!(Version::parse("1.2.3"), Version::parse("1.2.3"));
assert_eq!(Version::parse("1.2.3-alpha1"), Version::parse("1.2.3-alpha1"));
assert_eq!(Version::parse("1.2.3+build.42"), Version::parse("1.2.3+build.42"));
assert_eq!(Version::parse("1.2.3-alpha1+42"), Version::parse("1.2.3-alpha1+42"));
assert_eq!(Version::parse("1.2.3+23"), Version::parse("1.2.3+42"));
}
#[test]
fn test_ne() {
assert!(Version::parse("0.0.0") != Version::parse("0.0.1"));
assert!(Version::parse("0.0.0") != Version::parse("0.1.0"));
assert!(Version::parse("0.0.0") != Version::parse("1.0.0"));
assert!(Version::parse("1.2.3-alpha") != Version::parse("1.2.3-beta"));
}
#[test]
fn test_show() {
assert_eq!(format!("{}", Version::parse("1.2.3").unwrap()),
"1.2.3".to_string());
assert_eq!(format!("{}", Version::parse("1.2.3-alpha1").unwrap()),
"1.2.3-alpha1".to_string());
assert_eq!(format!("{}", Version::parse("1.2.3+build.42").unwrap()),
"1.2.3+build.42".to_string());
assert_eq!(format!("{}", Version::parse("1.2.3-alpha1+42").unwrap()),
"1.2.3-alpha1+42".to_string());
}
#[test]
fn test_to_string() {
assert_eq!(Version::parse("1.2.3").unwrap().to_string(), "1.2.3".to_string());
assert_eq!(Version::parse("1.2.3-alpha1").unwrap().to_string(), "1.2.3-alpha1".to_string());
assert_eq!(Version::parse("1.2.3+build.42").unwrap().to_string(), "1.2.3+build.42".to_string());
assert_eq!(Version::parse("1.2.3-alpha1+42").unwrap().to_string(), "1.2.3-alpha1+42".to_string());
}
#[test]
fn test_lt() {
assert!(Version::parse("0.0.0") < Version::parse("1.2.3-alpha2"));
assert!(Version::parse("1.0.0") < Version::parse("1.2.3-alpha2"));
assert!(Version::parse("1.2.0") < Version::parse("1.2.3-alpha2"));
assert!(Version::parse("1.2.3-alpha1") < Version::parse("1.2.3"));
assert!(Version::parse("1.2.3-alpha1") < Version::parse("1.2.3-alpha2"));
assert!(!(Version::parse("1.2.3-alpha2") < Version::parse("1.2.3-alpha2")));
assert!(!(Version::parse("1.2.3+23") < Version::parse("1.2.3+42")));
}
#[test]
fn test_le() {
assert!(Version::parse("0.0.0") <= Version::parse("1.2.3-alpha2"));
assert!(Version::parse("1.0.0") <= Version::parse("1.2.3-alpha2"));
assert!(Version::parse("1.2.0") <= Version::parse("1.2.3-alpha2"));
assert!(Version::parse("1.2.3-alpha1") <= Version::parse("1.2.3-alpha2"));
assert!(Version::parse("1.2.3-alpha2") <= Version::parse("1.2.3-alpha2"));
assert!(Version::parse("1.2.3+23") <= Version::parse("1.2.3+42"));
}
#[test]
fn test_gt() {
assert!(Version::parse("1.2.3-alpha2") > Version::parse("0.0.0"));
assert!(Version::parse("1.2.3-alpha2") > Version::parse("1.0.0"));
assert!(Version::parse("1.2.3-alpha2") > Version::parse("1.2.0"));
assert!(Version::parse("1.2.3-alpha2") > Version::parse("1.2.3-alpha1"));
assert!(Version::parse("1.2.3") > Version::parse("1.2.3-alpha2"));
assert!(!(Version::parse("1.2.3-alpha2") > Version::parse("1.2.3-alpha2")));
assert!(!(Version::parse("1.2.3+23") > Version::parse("1.2.3+42")));
}
#[test]
fn test_ge() {
assert!(Version::parse("1.2.3-alpha2") >= Version::parse("0.0.0"));
assert!(Version::parse("1.2.3-alpha2") >= Version::parse("1.0.0"));
assert!(Version::parse("1.2.3-alpha2") >= Version::parse("1.2.0"));
assert!(Version::parse("1.2.3-alpha2") >= Version::parse("1.2.3-alpha1"));
assert!(Version::parse("1.2.3-alpha2") >= Version::parse("1.2.3-alpha2"));
assert!(Version::parse("1.2.3+23") >= Version::parse("1.2.3+42"));
}
#[test]
fn test_prerelease_check() {
assert!(Version::parse("1.0.0").unwrap().is_prerelease() == false);
assert!(Version::parse("0.0.1").unwrap().is_prerelease() == false);
assert!(Version::parse("4.1.4-alpha").unwrap().is_prerelease());
assert!(Version::parse("1.0.0-beta294296").unwrap().is_prerelease());
}
#[test]
fn test_spec_order() {
let vs = ["1.0.0-alpha",
"1.0.0-alpha.1",
"1.0.0-alpha.beta",
"1.0.0-beta",
"1.0.0-beta.2",
"1.0.0-beta.11",
"1.0.0-rc.1",
"1.0.0"];
let mut i = 1;
while i < vs.len() {
let a = Version::parse(vs[i-1]).unwrap();
let b = Version::parse(vs[i]).unwrap();
assert!(a < b);
i += 1;
}
}
*/
}
// these now pass
#[macro_use]
extern crate nom;
pub mod parser;
use std::result;
/// A SemVer Version
#[derive(PartialEq,Debug)]
pub struct Version {
    /// Major version number.
    major: u32,
    /// Minor version number.
    minor: u32,
    /// Patch version number.
    patch: u32,
    /// Optional pre-release identifier: the part after `-`, e.g. "alpha1"
    /// in "1.2.3-alpha1" (see `tests::test_parse`).
    pre: Option<String>,
    /// Optional build metadata: the part after `+`, e.g. "build5" in
    /// "1.2.3+build5".
    build: Option<String>,
}
/// An error type for this crate
///
/// Currently, just a generic error. Will make this nicer later.
//
// Made `pub`: this type is named by the public `Result` alias and returned
// from the public `Version::parse`, so downstream users must be able to
// name and match it.
#[derive(PartialEq,Debug)]
pub enum SemVerError {
    /// Catch-all parse failure.
    GenericError,
}
/// A Result type for errors
///
/// Shorthand for `std::result::Result` with the error type fixed to this
/// crate's `SemVerError`.
pub type Result<T> = result::Result<T, SemVerError>;
// Conversion from the unit error `()` into the crate's error type.
// This is what lets `try!` in `Version::parse` propagate the parser's
// failure — presumably `parser::try_parse` fails with `()`; verify against
// the `parser` module.
impl From<()> for SemVerError {
    fn from(_: ()) -> SemVerError {
        // Every unit error collapses into the single generic variant.
        SemVerError::GenericError
    }
}
impl Version {
    /// Create a Version from a string
    ///
    /// Parses an `x.y.z` version, optionally followed by a `-<pre-release>`
    /// and/or `+<build metadata>` suffix. Leading and trailing whitespace is
    /// trimmed before parsing. Partial forms like "1" or "1.2" are rejected
    /// (see `tests::test_parse`).
    ///
    /// Returns `Err(SemVerError::GenericError)` on any parse failure; the
    /// parser's error is converted via the `From<()>` impl above.
    pub fn parse(version: &str) -> Result<Version> {
        // `parser::try_parse` operates on the trimmed byte slice.
        Ok(try!(parser::try_parse(version.trim().as_bytes())))
    }
}
#[cfg(test)]
mod tests {
    use super::Version;
    use super::SemVerError;

    #[test]
    fn test_parse() {
        // Use `assert_eq!` rather than `assert!(a == b)` so a failing case
        // prints both the actual and the expected value.
        assert_eq!(Version::parse(""), Err(SemVerError::GenericError));
        assert_eq!(Version::parse(" "), Err(SemVerError::GenericError));
        assert_eq!(Version::parse("1"), Err(SemVerError::GenericError));
        assert_eq!(Version::parse("1.2"), Err(SemVerError::GenericError));
        //assert_eq!(Version::parse("1.2.3-"), Err(SemVerError::GenericError));
        assert_eq!(Version::parse("a.b.c"), Err(SemVerError::GenericError));
        //assert_eq!(Version::parse("1.2.3 abc"), Err(SemVerError::GenericError));
        assert_eq!(Version::parse("1.2.3"), Ok(Version {
            major: 1,
            minor: 2,
            patch: 3,
            pre: None,
            build: None,
        }));
        // Surrounding whitespace is trimmed before parsing.
        assert_eq!(Version::parse(" 1.2.3 "), Ok(Version {
            major: 1,
            minor: 2,
            patch: 3,
            pre: None,
            build: None,
        }));
        assert_eq!(Version::parse("1.2.3-alpha1"), Ok(Version {
            major: 1,
            minor: 2,
            patch: 3,
            pre: Some(String::from("alpha1")),
            build: None,
        }));
        assert_eq!(Version::parse(" 1.2.3-alpha1 "), Ok(Version {
            major: 1,
            minor: 2,
            patch: 3,
            pre: Some(String::from("alpha1")),
            build: None,
        }));
        assert_eq!(Version::parse("1.2.3+build5"), Ok(Version {
            major: 1,
            minor: 2,
            patch: 3,
            pre: None,
            build: Some(String::from("build5")),
        }));
        assert_eq!(Version::parse(" 1.2.3+build5 "), Ok(Version {
            major: 1,
            minor: 2,
            patch: 3,
            pre: None,
            build: Some(String::from("build5")),
        }));
        assert_eq!(Version::parse("1.2.3-alpha1+build5"), Ok(Version {
            major: 1,
            minor: 2,
            patch: 3,
            pre: Some(String::from("alpha1")),
            build: Some(String::from("build5")),
        }));
        assert_eq!(Version::parse(" 1.2.3-alpha1+build5 "), Ok(Version {
            major: 1,
            minor: 2,
            patch: 3,
            pre: Some(String::from("alpha1")),
            build: Some(String::from("build5")),
        }));
        assert_eq!(Version::parse("1.2.3-1.alpha1.9+build5.7.3aedf "), Ok(Version {
            major: 1,
            minor: 2,
            patch: 3,
            pre: Some(String::from("1.alpha1.9")),
            build: Some(String::from("build5.7.3aedf")),
        }));
        assert_eq!(Version::parse("0.4.0-beta.1+0851523"), Ok(Version {
            major: 0,
            minor: 4,
            patch: 0,
            pre: Some(String::from("beta.1")),
            build: Some(String::from("0851523")),
        }));
    }

    // The tests below exercise functionality (increment_*, Display, Ord)
    // that is not implemented yet; they stay disabled until it is.
    /*
    #[test]
    fn test_increment_patch() {
        let mut buggy_release = Version::parse("0.1.0").unwrap();
        buggy_release.increment_patch();
        assert_eq!(buggy_release, Version::parse("0.1.1").unwrap());
    }
    #[test]
    fn test_increment_minor() {
        let mut feature_release = Version::parse("1.4.6").unwrap();
        feature_release.increment_minor();
        assert_eq!(feature_release, Version::parse("1.5.0").unwrap());
    }
    #[test]
    fn test_increment_major() {
        let mut chrome_release = Version::parse("46.1.246773").unwrap();
        chrome_release.increment_major();
        assert_eq!(chrome_release, Version::parse("47.0.0").unwrap());
    }
    #[test]
    fn test_increment_keep_prerelease() {
        let mut release = Version::parse("1.0.0-alpha").unwrap();
        release.increment_patch();
        assert_eq!(release, Version::parse("1.0.1").unwrap());
        release.increment_minor();
        assert_eq!(release, Version::parse("1.1.0").unwrap());
        release.increment_major();
        assert_eq!(release, Version::parse("2.0.0").unwrap());
    }
    #[test]
    fn test_increment_clear_metadata() {
        let mut release = Version::parse("1.0.0+4442").unwrap();
        release.increment_patch();
        assert_eq!(release, Version::parse("1.0.1").unwrap());
        release = Version::parse("1.0.1+hello").unwrap();
        release.increment_minor();
        assert_eq!(release, Version::parse("1.1.0").unwrap());
        release = Version::parse("1.1.3747+hello").unwrap();
        release.increment_major();
        assert_eq!(release, Version::parse("2.0.0").unwrap());
    }
    #[test]
    fn test_eq() {
        assert_eq!(Version::parse("1.2.3"), Version::parse("1.2.3"));
        assert_eq!(Version::parse("1.2.3-alpha1"), Version::parse("1.2.3-alpha1"));
        assert_eq!(Version::parse("1.2.3+build.42"), Version::parse("1.2.3+build.42"));
        assert_eq!(Version::parse("1.2.3-alpha1+42"), Version::parse("1.2.3-alpha1+42"));
        assert_eq!(Version::parse("1.2.3+23"), Version::parse("1.2.3+42"));
    }
    #[test]
    fn test_ne() {
        assert!(Version::parse("0.0.0") != Version::parse("0.0.1"));
        assert!(Version::parse("0.0.0") != Version::parse("0.1.0"));
        assert!(Version::parse("0.0.0") != Version::parse("1.0.0"));
        assert!(Version::parse("1.2.3-alpha") != Version::parse("1.2.3-beta"));
    }
    #[test]
    fn test_show() {
        assert_eq!(format!("{}", Version::parse("1.2.3").unwrap()),
                   "1.2.3".to_string());
        assert_eq!(format!("{}", Version::parse("1.2.3-alpha1").unwrap()),
                   "1.2.3-alpha1".to_string());
        assert_eq!(format!("{}", Version::parse("1.2.3+build.42").unwrap()),
                   "1.2.3+build.42".to_string());
        assert_eq!(format!("{}", Version::parse("1.2.3-alpha1+42").unwrap()),
                   "1.2.3-alpha1+42".to_string());
    }
    #[test]
    fn test_to_string() {
        assert_eq!(Version::parse("1.2.3").unwrap().to_string(), "1.2.3".to_string());
        assert_eq!(Version::parse("1.2.3-alpha1").unwrap().to_string(), "1.2.3-alpha1".to_string());
        assert_eq!(Version::parse("1.2.3+build.42").unwrap().to_string(), "1.2.3+build.42".to_string());
        assert_eq!(Version::parse("1.2.3-alpha1+42").unwrap().to_string(), "1.2.3-alpha1+42".to_string());
    }
    #[test]
    fn test_lt() {
        assert!(Version::parse("0.0.0") < Version::parse("1.2.3-alpha2"));
        assert!(Version::parse("1.0.0") < Version::parse("1.2.3-alpha2"));
        assert!(Version::parse("1.2.0") < Version::parse("1.2.3-alpha2"));
        assert!(Version::parse("1.2.3-alpha1") < Version::parse("1.2.3"));
        assert!(Version::parse("1.2.3-alpha1") < Version::parse("1.2.3-alpha2"));
        assert!(!(Version::parse("1.2.3-alpha2") < Version::parse("1.2.3-alpha2")));
        assert!(!(Version::parse("1.2.3+23") < Version::parse("1.2.3+42")));
    }
    #[test]
    fn test_le() {
        assert!(Version::parse("0.0.0") <= Version::parse("1.2.3-alpha2"));
        assert!(Version::parse("1.0.0") <= Version::parse("1.2.3-alpha2"));
        assert!(Version::parse("1.2.0") <= Version::parse("1.2.3-alpha2"));
        assert!(Version::parse("1.2.3-alpha1") <= Version::parse("1.2.3-alpha2"));
        assert!(Version::parse("1.2.3-alpha2") <= Version::parse("1.2.3-alpha2"));
        assert!(Version::parse("1.2.3+23") <= Version::parse("1.2.3+42"));
    }
    #[test]
    fn test_gt() {
        assert!(Version::parse("1.2.3-alpha2") > Version::parse("0.0.0"));
        assert!(Version::parse("1.2.3-alpha2") > Version::parse("1.0.0"));
        assert!(Version::parse("1.2.3-alpha2") > Version::parse("1.2.0"));
        assert!(Version::parse("1.2.3-alpha2") > Version::parse("1.2.3-alpha1"));
        assert!(Version::parse("1.2.3") > Version::parse("1.2.3-alpha2"));
        assert!(!(Version::parse("1.2.3-alpha2") > Version::parse("1.2.3-alpha2")));
        assert!(!(Version::parse("1.2.3+23") > Version::parse("1.2.3+42")));
    }
    #[test]
    fn test_ge() {
        assert!(Version::parse("1.2.3-alpha2") >= Version::parse("0.0.0"));
        assert!(Version::parse("1.2.3-alpha2") >= Version::parse("1.0.0"));
        assert!(Version::parse("1.2.3-alpha2") >= Version::parse("1.2.0"));
        assert!(Version::parse("1.2.3-alpha2") >= Version::parse("1.2.3-alpha1"));
        assert!(Version::parse("1.2.3-alpha2") >= Version::parse("1.2.3-alpha2"));
        assert!(Version::parse("1.2.3+23") >= Version::parse("1.2.3+42"));
    }
    #[test]
    fn test_prerelease_check() {
        assert!(Version::parse("1.0.0").unwrap().is_prerelease() == false);
        assert!(Version::parse("0.0.1").unwrap().is_prerelease() == false);
        assert!(Version::parse("4.1.4-alpha").unwrap().is_prerelease());
        assert!(Version::parse("1.0.0-beta294296").unwrap().is_prerelease());
    }
    #[test]
    fn test_spec_order() {
        let vs = ["1.0.0-alpha",
                  "1.0.0-alpha.1",
                  "1.0.0-alpha.beta",
                  "1.0.0-beta",
                  "1.0.0-beta.2",
                  "1.0.0-beta.11",
                  "1.0.0-rc.1",
                  "1.0.0"];
        let mut i = 1;
        while i < vs.len() {
            let a = Version::parse(vs[i-1]).unwrap();
            let b = Version::parse(vs[i]).unwrap();
            assert!(a < b);
            i += 1;
        }
    }
    */
}
|
//! Generate Rust bindings for C and C++ libraries.
//!
//! Provide a C/C++ header file, receive Rust FFI code to call into C/C++
//! functions and use types defined in the header.
//!
//! See the [`Builder`](./struct.Builder.html) struct for usage.
#![deny(missing_docs)]
#![deny(warnings)]
#![deny(unused_extern_crates)]
// To avoid rather annoying warnings when matching with CXCursor_xxx as a
// constant.
#![allow(non_upper_case_globals)]
// `quote!` nests quite deeply.
#![recursion_limit="128"]
extern crate cexpr;
#[macro_use]
#[allow(unused_extern_crates)]
extern crate cfg_if;
extern crate clang_sys;
#[macro_use]
extern crate lazy_static;
extern crate peeking_take_while;
#[macro_use]
extern crate quote;
extern crate regex;
extern crate which;
#[cfg(feature = "logging")]
#[macro_use]
extern crate log;
#[cfg(not(feature = "logging"))]
#[macro_use]
mod log_stubs;
#[macro_use]
mod extra_assertions;
// A macro to declare an internal module for which we *must* provide
// documentation for. If we are building with the "testing_only_docs" feature,
// then the module is declared public, and our `#![deny(missing_docs)]` pragma
// applies to it. This feature is used in CI, so we won't let anything slip by
// undocumented. Normal builds, however, will leave the module private, so that
// we don't expose internals to library consumers.
macro_rules! doc_mod {
    ($m:ident, $doc_mod_name:ident) => {
        cfg_if! {
            if #[cfg(feature = "testing_only_docs")] {
                // Docs build: re-export the private module's contents under a
                // public, documented name so rustdoc (and missing_docs) sees it.
                pub mod $doc_mod_name {
                    //! Autogenerated documentation module.
                    pub use super::$m::*;
                }
            } else {
                // Normal build: expand to nothing; `mod $m` stays private.
            }
        }
    };
}
mod clang;
mod codegen;
mod features;
mod ir;
mod parse;
mod regex_set;
mod time;
pub mod callbacks;
doc_mod!(clang, clang_docs);
doc_mod!(features, features_docs);
doc_mod!(ir, ir_docs);
doc_mod!(parse, parse_docs);
doc_mod!(regex_set, regex_set_docs);
pub use features::{LATEST_STABLE_RUST, RUST_TARGET_STRINGS, RustTarget};
use features::RustFeatures;
use ir::context::{BindgenContext, ItemId};
use ir::item::Item;
use parse::{ClangItemParser, ParseError};
use regex_set::RegexSet;
use std::borrow::Cow;
use std::fs::{File, OpenOptions};
use std::io::{self, Write};
use std::iter;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::sync::Arc;
/// A type used to indicate which kind of items do we have to generate.
///
/// Each flag enables generation of one category of item; see the `all` and
/// `nothing` constructors for the two common presets.
///
/// TODO(emilio): Use `bitflags!`
#[derive(Debug, Clone)]
pub struct CodegenConfig {
    /// Whether to generate functions.
    pub functions: bool,
    /// Whether to generate types.
    pub types: bool,
    /// Whether to generate constants.
    pub vars: bool,
    /// Whether to generate methods.
    pub methods: bool,
    /// Whether to generate constructors.
    pub constructors: bool,
    /// Whether to generate destructors.
    pub destructors: bool,
}
impl CodegenConfig {
/// Generate all kinds of items.
pub fn all() -> Self {
CodegenConfig {
functions: true,
types: true,
vars: true,
methods: true,
constructors: true,
destructors: true,
}
}
/// Generate nothing.
pub fn nothing() -> Self {
CodegenConfig {
functions: false,
types: false,
vars: false,
methods: false,
constructors: false,
destructors: false,
}
}
}
impl Default for CodegenConfig {
fn default() -> Self {
CodegenConfig::all()
}
}
/// Configure and generate Rust bindings for a C/C++ header.
///
/// This is the main entry point to the library.
///
/// ```ignore
/// use bindgen::builder;
///
/// // Configure and generate bindings.
/// let bindings = try!(builder().header("path/to/input/header")
///     .whitelisted_type("SomeCoolClass")
///     .whitelisted_function("do_some_cool_thing")
///     .generate());
///
/// // Write the generated bindings to an output file.
/// try!(bindings.write_to_file("path/to/output.rs"));
/// ```
#[derive(Debug, Default)]
pub struct Builder {
    // Every configuration knob; `command_line_flags` mirrors this state as
    // CLI arguments.
    options: BindgenOptions,
    // Header paths added via `header()`. The last one is the primary
    // (positional) input; earlier ones are passed through as extra clang
    // inputs (see `command_line_flags`).
    input_headers: Vec<String>,
    // Tuples of unsaved file contents of the form (name, contents).
    input_header_contents: Vec<(String, String)>,
}
/// Construct a new [`Builder`](./struct.Builder.html).
pub fn builder() -> Builder {
Default::default()
}
impl Builder {
/// Generates the command line flags use for creating `Builder`.
///
/// Every configured option is rendered back into the equivalent bindgen
/// CLI arguments, in a fixed order; regex-set options are emitted as
/// repeated `--flag <item>` pairs with any `^`/`$` anchors stripped.
pub fn command_line_flags(&self) -> Vec<String> {
    let mut output_vector: Vec<String> = Vec::new();

    if let Some(header) = self.input_headers.last().cloned() {
        // Positional argument 'header'
        output_vector.push(header);
    }

    output_vector.push(self.options.rust_target.into());

    // NOTE: the previous implementation drove these side effects with
    // `.map(..).count()` chains; plain `for` loops state the intent
    // directly and avoid the misuse of `Iterator::map`.
    for item in self.options.bitfield_enums.get_items().iter() {
        output_vector.push("--bitfield-enum".into());
        output_vector.push(
            item.trim_left_matches("^").trim_right_matches("$").into(),
        );
    }

    for item in self.options.rustified_enums.get_items().iter() {
        output_vector.push("--rustified-enum".into());
        output_vector.push(
            item.trim_left_matches("^").trim_right_matches("$").into(),
        );
    }

    for item in self.options.constified_enum_modules.get_items().iter() {
        output_vector.push("--constified-enum-module".into());
        output_vector.push(
            item.trim_left_matches("^").trim_right_matches("$").into(),
        );
    }

    for item in self.options.blacklisted_types.get_items().iter() {
        output_vector.push("--blacklist-type".into());
        output_vector.push(
            item.trim_left_matches("^").trim_right_matches("$").into(),
        );
    }

    if !self.options.layout_tests {
        output_vector.push("--no-layout-tests".into());
    }
    if self.options.impl_debug {
        output_vector.push("--impl-debug".into());
    }
    if self.options.impl_partialeq {
        output_vector.push("--impl-partialeq".into());
    }
    if !self.options.derive_copy {
        output_vector.push("--no-derive-copy".into());
    }
    if !self.options.derive_debug {
        output_vector.push("--no-derive-debug".into());
    }
    if !self.options.derive_default {
        output_vector.push("--no-derive-default".into());
    } else {
        output_vector.push("--with-derive-default".into());
    }
    if self.options.derive_hash {
        output_vector.push("--with-derive-hash".into());
    }
    if self.options.derive_partialord {
        output_vector.push("--with-derive-partialord".into());
    }
    if self.options.derive_ord {
        output_vector.push("--with-derive-ord".into());
    }
    if self.options.derive_partialeq {
        output_vector.push("--with-derive-partialeq".into());
    }
    if self.options.derive_eq {
        output_vector.push("--with-derive-eq".into());
    }
    if self.options.time_phases {
        output_vector.push("--time-phases".into());
    }
    if !self.options.generate_comments {
        output_vector.push("--no-doc-comments".into());
    }
    if !self.options.whitelist_recursively {
        output_vector.push("--no-recursive-whitelist".into());
    }
    if self.options.objc_extern_crate {
        output_vector.push("--objc-extern-crate".into());
    }
    if self.options.builtins {
        output_vector.push("--builtins".into());
    }
    if let Some(ref prefix) = self.options.ctypes_prefix {
        output_vector.push("--ctypes-prefix".into());
        output_vector.push(prefix.clone());
    }
    if self.options.emit_ast {
        output_vector.push("--emit-clang-ast".into());
    }
    if self.options.emit_ir {
        output_vector.push("--emit-ir".into());
    }
    if let Some(ref graph) = self.options.emit_ir_graphviz {
        output_vector.push("--emit-ir-graphviz".into());
        output_vector.push(graph.clone())
    }
    if self.options.enable_cxx_namespaces {
        output_vector.push("--enable-cxx-namespaces".into());
    }
    if self.options.disable_name_namespacing {
        output_vector.push("--disable-name-namespacing".into());
    }

    for &(ref item, _) in self.options.links.iter() {
        output_vector.push("--framework".into());
        output_vector.push(
            item.trim_left_matches("^").trim_right_matches("$").into(),
        );
    }

    if !self.options.codegen_config.functions {
        output_vector.push("--ignore-functions".into());
    }

    output_vector.push("--generate".into());

    //Temporary placeholder for below 4 options
    let mut options: Vec<String> = Vec::new();
    if self.options.codegen_config.functions {
        options.push("function".into());
    }
    if self.options.codegen_config.types {
        options.push("types".into());
    }
    if self.options.codegen_config.vars {
        options.push("vars".into());
    }
    if self.options.codegen_config.methods {
        options.push("methods".into());
    }
    if self.options.codegen_config.constructors {
        options.push("constructors".into());
    }
    if self.options.codegen_config.destructors {
        options.push("destructors".into());
    }
    output_vector.push(options.join(","));

    if !self.options.codegen_config.methods {
        output_vector.push("--ignore-methods".into());
    }

    // NOTE(review): links are also emitted under `--clang-args` here, as in
    // the original code — looks suspicious; verify against the CLI parser.
    for &(ref item, _) in self.options.links.iter() {
        output_vector.push("--clang-args".into());
        output_vector.push(
            item.trim_left_matches("^").trim_right_matches("$").into(),
        );
    }

    if !self.options.convert_floats {
        output_vector.push("--no-convert-floats".into());
    }
    if !self.options.prepend_enum_name {
        output_vector.push("--no-prepend-enum-name".into());
    }

    for item in self.options.opaque_types.get_items().iter() {
        output_vector.push("--opaque-type".into());
        output_vector.push(
            item.trim_left_matches("^").trim_right_matches("$").into(),
        );
    }

    for item in self.options.raw_lines.iter() {
        output_vector.push("--raw-line".into());
        output_vector.push(
            item.trim_left_matches("^").trim_right_matches("$").into(),
        );
    }

    for &(ref item, _) in self.options.links.iter() {
        output_vector.push("--static".into());
        output_vector.push(
            item.trim_left_matches("^").trim_right_matches("$").into(),
        );
    }

    if self.options.use_core {
        output_vector.push("--use-core".into());
    }
    if self.options.conservative_inline_namespaces {
        output_vector.push("--conservative-inline-namespaces".into());
    }

    for item in self.options.whitelisted_functions.get_items().iter() {
        output_vector.push("--whitelist-function".into());
        output_vector.push(
            item.trim_left_matches("^").trim_right_matches("$").into(),
        );
    }

    for item in self.options.whitelisted_types.get_items().iter() {
        output_vector.push("--whitelist-type".into());
        output_vector.push(
            item.trim_left_matches("^").trim_right_matches("$").into(),
        );
    }

    for item in self.options.whitelisted_vars.get_items().iter() {
        output_vector.push("--whitelist-var".into());
        output_vector.push(
            item.trim_left_matches("^").trim_right_matches("$").into(),
        );
    }

    // Everything after `--` goes straight to clang.
    output_vector.push("--".into());
    if !self.options.clang_args.is_empty() {
        output_vector.extend(self.options.clang_args.iter().cloned());
    }
    if self.input_headers.len() > 1 {
        // All headers except the last (the positional one) are forwarded
        // as additional clang inputs.
        output_vector.extend(
            self.input_headers[..self.input_headers.len() - 1]
                .iter()
                .cloned(),
        );
    }

    if !self.options.rustfmt_bindings {
        output_vector.push("--no-rustfmt-bindings".into());
    }
    if let Some(path) = self.options
        .rustfmt_configuration_file
        .as_ref()
        .and_then(|f| f.to_str())
    {
        output_vector.push("--rustfmt-configuration-file".into());
        output_vector.push(path.into());
    }

    for item in self.options.no_partialeq_types.get_items().iter() {
        output_vector.push("--no-partialeq".into());
        output_vector.push(
            item.trim_left_matches("^").trim_right_matches("$").into(),
        );
    }

    for item in self.options.no_copy_types.get_items().iter() {
        output_vector.push("--no-copy".into());
        output_vector.push(
            item.trim_left_matches("^").trim_right_matches("$").into(),
        );
    }

    for item in self.options.no_hash_types.get_items().iter() {
        output_vector.push("--no-hash".into());
        output_vector.push(
            item.trim_left_matches("^").trim_right_matches("$").into(),
        );
    }

    output_vector
}
/// Add an input C/C++ header to generate bindings for.
///
/// This can be used to generate bindings to a single header:
///
/// ```ignore
/// let bindings = bindgen::Builder::default()
///     .header("input.h")
///     .generate()
///     .unwrap();
/// ```
///
/// Or you can invoke it multiple times to generate bindings to multiple
/// headers:
///
/// ```ignore
/// let bindings = bindgen::Builder::default()
///     .header("first.h")
///     .header("second.h")
///     .header("third.h")
///     .generate()
///     .unwrap();
/// ```
pub fn header<T: Into<String>>(mut self, header: T) -> Builder {
    let header = header.into();
    self.input_headers.push(header);
    self
}
/// Add `contents` as an input C/C++ header named `name`.
///
/// The file `name` will be added to the clang arguments.
pub fn header_contents(mut self, name: &str, contents: &str) -> Builder {
    // Store an owned (name, contents) pair for later use as an unsaved file.
    let entry = (name.into(), contents.into());
    self.input_header_contents.push(entry);
    self
}
/// Specify the rust target
///
/// The default is the latest stable Rust version
pub fn rust_target(mut self, rust_target: RustTarget) -> Self {
    // Delegates so any feature flags tied to the target stay in sync.
    self.options.set_rust_target(rust_target);
    self
}
/// Set the output graphviz file.
pub fn emit_ir_graphviz<T: Into<String>>(mut self, path: T) -> Builder {
    // Convert inline; the temporary binding in the original added nothing.
    self.options.emit_ir_graphviz = Some(path.into());
    self
}
/// Whether the generated bindings should contain documentation comments or
/// not.
///
/// This ideally will always be true, but it may need to be false until we
/// implement some processing on comments to work around issues as described
/// in:
///
/// https://github.com/rust-lang-nursery/rust-bindgen/issues/426
pub fn generate_comments(mut self, doit: bool) -> Self {
    // Emitted as `--no-doc-comments` when false (see command_line_flags).
    self.options.generate_comments = doit;
    self
}
/// Whether to whitelist recursively or not. Defaults to true.
///
/// Given that we have explicitly whitelisted the "initiate_dance_party"
/// function in this C header:
///
/// ```c
/// typedef struct MoonBoots {
///     int bouncy_level;
/// } MoonBoots;
///
/// void initiate_dance_party(MoonBoots* boots);
/// ```
///
/// We would normally generate bindings to both the `initiate_dance_party`
/// function and the `MoonBoots` struct that it transitively references. By
/// configuring with `whitelist_recursively(false)`, `bindgen` will not emit
/// bindings for anything except the explicitly whitelisted items, and there
/// would be no emitted struct definition for `MoonBoots`. However, the
/// `initiate_dance_party` function would still reference `MoonBoots`!
///
/// **Disabling this feature will almost certainly cause `bindgen` to emit
/// bindings that will not compile!** If you disable this feature, then it
/// is *your* responsiblity to provide definitions for every type that is
/// referenced from an explicitly whitelisted item. One way to provide the
/// definitions is by using the [`Builder::raw_line`](#method.raw_line)
/// method, another would be to define them in Rust and then `include!(...)`
/// the bindings immediately afterwards.
pub fn whitelist_recursively(mut self, doit: bool) -> Self {
    // Emitted as `--no-recursive-whitelist` when false.
    self.options.whitelist_recursively = doit;
    self
}
/// Generate `#[macro_use] extern crate objc;` instead of `use objc;`
/// in the prologue of the files generated from objective-c files
pub fn objc_extern_crate(mut self, doit: bool) -> Self {
    // Emitted as `--objc-extern-crate` when true.
    self.options.objc_extern_crate = doit;
    self
}
/// Whether to use the clang-provided name mangling. This is true by default
/// and probably needed for C++ features.
///
/// However, some old libclang versions seem to return incorrect results in
/// some cases for non-mangled functions, see [1], so we allow disabling it.
///
/// [1]: https://github.com/rust-lang-nursery/rust-bindgen/issues/528
pub fn trust_clang_mangling(mut self, doit: bool) -> Self {
    // Note the method name differs from the option field (`enable_mangling`).
    self.options.enable_mangling = doit;
    self
}
/// Hide the given type from the generated bindings. Regular expressions are
/// supported.
///
/// Deprecated alias that simply delegates to
/// [`blacklist_type`](#method.blacklist_type).
#[deprecated = "Use blacklist_type instead"]
pub fn hide_type<T: AsRef<str>>(self, arg: T) -> Builder {
    self.blacklist_type(arg)
}
/// Hide the given type from the generated bindings. Regular expressions are
/// supported.
pub fn blacklist_type<T: AsRef<str>>(mut self, arg: T) -> Builder {
    // Emitted as repeated `--blacklist-type <regex>` flags.
    self.options.blacklisted_types.insert(arg);
    self
}
/// Treat the given type as opaque in the generated bindings. Regular
/// expressions are supported.
pub fn opaque_type<T: AsRef<str>>(mut self, arg: T) -> Builder {
    // Emitted as repeated `--opaque-type <regex>` flags.
    self.options.opaque_types.insert(arg);
    self
}
/// Whitelist the given type so that it (and all types that it transitively
/// refers to) appears in the generated bindings. Regular expressions are
/// supported.
///
/// Deprecated alias that simply delegates to
/// [`whitelist_type`](#method.whitelist_type).
#[deprecated = "use whitelist_type instead"]
pub fn whitelisted_type<T: AsRef<str>>(self, arg: T) -> Builder {
    self.whitelist_type(arg)
}
/// Whitelist the given type so that it (and all types that it transitively
/// refers to) appears in the generated bindings. Regular expressions are
/// supported.
pub fn whitelist_type<T: AsRef<str>>(mut self, arg: T) -> Builder {
    // Emitted as repeated `--whitelist-type <regex>` flags.
    self.options.whitelisted_types.insert(arg);
    self
}
/// Whitelist the given function so that it (and all types that it
/// transitively refers to) appears in the generated bindings. Regular
/// expressions are supported.
pub fn whitelist_function<T: AsRef<str>>(mut self, arg: T) -> Builder {
    // Emitted as repeated `--whitelist-function <regex>` flags.
    self.options.whitelisted_functions.insert(arg);
    self
}
/// Whitelist the given function.
///
/// Deprecated: use whitelist_function instead. This alias simply delegates.
#[deprecated = "use whitelist_function instead"]
pub fn whitelisted_function<T: AsRef<str>>(self, arg: T) -> Builder {
    self.whitelist_function(arg)
}
/// Whitelist the given variable so that it (and all types that it
/// transitively refers to) appears in the generated bindings. Regular
/// expressions are supported.
pub fn whitelist_var<T: AsRef<str>>(mut self, arg: T) -> Builder {
    // Emitted as repeated `--whitelist-var <regex>` flags.
    self.options.whitelisted_vars.insert(arg);
    self
}
/// Whitelist the given variable.
///
/// Deprecated: use whitelist_var instead. This alias simply delegates.
#[deprecated = "use whitelist_var instead"]
pub fn whitelisted_var<T: AsRef<str>>(self, arg: T) -> Builder {
    self.whitelist_var(arg)
}
/// Mark the given enum (or set of enums, if using a pattern) as being
/// bitfield-like. Regular expressions are supported.
///
/// This makes bindgen generate a type that isn't a rust `enum`.
pub fn bitfield_enum<T: AsRef<str>>(mut self, arg: T) -> Builder {
    // Emitted as repeated `--bitfield-enum <regex>` flags.
    self.options.bitfield_enums.insert(arg);
    self
}
/// Mark the given enum (or set of enums, if using a pattern) as a Rust
/// enum.
///
/// This makes bindgen generate enums instead of constants. Regular
/// expressions are supported.
///
/// **Use this with caution.** You should not be using Rust enums unless
/// you have complete control of the C/C++ code that you're binding to.
/// Take a look at https://github.com/rust-lang/rust/issues/36927 for
/// more information.
pub fn rustified_enum<T: AsRef<str>>(mut self, arg: T) -> Builder {
    // Emitted as repeated `--rustified-enum <regex>` flags.
    self.options.rustified_enums.insert(arg);
    self
}
/// Mark the given enum (or set of enums, if using a pattern) as a set of
/// constants that should be put into a module.
///
/// This makes bindgen generate modules containing constants instead of
/// just constants. Regular expressions are supported.
pub fn constified_enum_module<T: AsRef<str>>(mut self, arg: T) -> Builder {
    // Emitted as repeated `--constified-enum-module <regex>` flags.
    self.options.constified_enum_modules.insert(arg);
    self
}
/// Add a line of text to prepend to the generated bindings. The line is
/// emitted verbatim, without any modification.
pub fn raw_line<T: Into<String>>(mut self, arg: T) -> Builder {
    let line = arg.into();
    self.options.raw_lines.push(line);
    self
}
/// Add a single argument that is handed to clang unmodified.
pub fn clang_arg<T: Into<String>>(mut self, arg: T) -> Builder {
    let arg = arg.into();
    self.options.clang_args.push(arg);
    self
}
/// Add several arguments that are handed to clang unmodified.
pub fn clang_args<I>(self, iter: I) -> Builder
where
    I: IntoIterator,
    I::Item: AsRef<str>,
{
    // Thread the builder through `clang_arg` for every item.
    iter.into_iter()
        .fold(self, |builder, arg| builder.clang_arg(arg.as_ref()))
}
/// Make the generated bindings link the given shared library.
pub fn link<T: Into<String>>(mut self, library: T) -> Builder {
    let entry = (library.into(), LinkType::Default);
    self.options.links.push(entry);
    self
}
/// Make the generated bindings link the given static library.
pub fn link_static<T: Into<String>>(mut self, library: T) -> Builder {
    let entry = (library.into(), LinkType::Static);
    self.options.links.push(entry);
    self
}
/// Make the generated bindings link the given framework.
pub fn link_framework<T: Into<String>>(mut self, library: T) -> Builder {
    let entry = (library.into(), LinkType::Framework);
    self.options.links.push(entry);
    self
}
/// Emit bindings for compiler builtin definitions (for example
/// `__builtin_va_list`) in the generated Rust.
pub fn emit_builtins(mut self) -> Builder {
    let opts = &mut self.options;
    opts.builtins = true;
    self
}
/// Do not convert floating point types to `f32`/`f64` by default.
pub fn no_convert_floats(mut self) -> Self {
    let opts = &mut self.options;
    opts.convert_floats = false;
    self
}
/// Control whether layout tests are generated.
pub fn layout_tests(mut self, doit: bool) -> Self {
    let opts = &mut self.options;
    opts.layout_tests = doit;
    self
}
/// Control whether a manual `Debug` impl is emitted for types where it
/// cannot be derived automatically.
pub fn impl_debug(mut self, doit: bool) -> Self {
    let opts = &mut self.options;
    opts.impl_debug = doit;
    self
}
/// Control whether a manual `PartialEq` impl is emitted for types where
/// it cannot be derived automatically.
pub fn impl_partialeq(mut self, doit: bool) -> Self {
    let opts = &mut self.options;
    opts.impl_partialeq = doit;
    self
}
/// Control whether `Copy` is derived by default.
pub fn derive_copy(mut self, doit: bool) -> Self {
    let opts = &mut self.options;
    opts.derive_copy = doit;
    self
}
/// Control whether `Debug` is derived by default.
pub fn derive_debug(mut self, doit: bool) -> Self {
    let opts = &mut self.options;
    opts.derive_debug = doit;
    self
}
/// Control whether `Default` is derived by default.
pub fn derive_default(mut self, doit: bool) -> Self {
    let opts = &mut self.options;
    opts.derive_default = doit;
    self
}
/// Control whether `Hash` is derived by default.
pub fn derive_hash(mut self, doit: bool) -> Self {
    let opts = &mut self.options;
    opts.derive_hash = doit;
    self
}
/// Control whether `PartialOrd` is derived by default.
///
/// `Ord` cannot be derived without `PartialOrd`, so turning this off
/// also turns off `derive_ord`.
pub fn derive_partialord(mut self, doit: bool) -> Self {
    let opts = &mut self.options;
    opts.derive_partialord = doit;
    if !doit {
        opts.derive_ord = false;
    }
    self
}
/// Control whether `Ord` is derived by default.
///
/// `Ord` cannot be derived without `PartialOrd`, so `derive_partialord`
/// is forced to the same value.
pub fn derive_ord(mut self, doit: bool) -> Self {
    let opts = &mut self.options;
    opts.derive_ord = doit;
    opts.derive_partialord = doit;
    self
}
/// Control whether `PartialEq` is derived by default.
///
/// `Eq` cannot be derived without `PartialEq`, so turning this off also
/// turns off `derive_eq`.
pub fn derive_partialeq(mut self, doit: bool) -> Self {
    let opts = &mut self.options;
    opts.derive_partialeq = doit;
    if !doit {
        opts.derive_eq = false;
    }
    self
}
/// Control whether `Eq` is derived by default.
///
/// `Eq` cannot be derived without `PartialEq`, so enabling this also
/// enables `derive_partialeq`.
pub fn derive_eq(mut self, doit: bool) -> Self {
    let opts = &mut self.options;
    opts.derive_eq = doit;
    if doit {
        opts.derive_partialeq = true;
    }
    self
}
/// Control whether bindgen times its phases and reports the timings on
/// stderr.
pub fn time_phases(mut self, doit: bool) -> Self {
    let opts = &mut self.options;
    opts.time_phases = doit;
    self
}
/// Dump the Clang AST for debugging.
pub fn emit_clang_ast(mut self) -> Builder {
    let opts = &mut self.options;
    opts.emit_ast = true;
    self
}
/// Dump bindgen's internal IR for debugging.
pub fn emit_ir(mut self) -> Builder {
    let opts = &mut self.options;
    opts.emit_ir = true;
    self
}
/// Emulate C++ namespaces with Rust modules in the generated bindings.
pub fn enable_cxx_namespaces(mut self) -> Builder {
    let opts = &mut self.options;
    opts.enable_cxx_namespaces = true;
    self
}
/// Disable automatic name namespacing.
///
/// By default bindgen mangles `foo::bar::Baz` into `foo_bar_Baz` rather
/// than plain `Baz`; this turns that mangling off.
///
/// Note that this intentionally does not affect the names used for
/// whitelisting and blacklisting, which should still be written with
/// their namespaces.
///
/// Note, also, that enabling this may make bindgen generate duplicate
/// names.
pub fn disable_name_namespacing(mut self) -> Builder {
    let opts = &mut self.options;
    opts.disable_name_namespacing = true;
    self
}
/// Treat inline namespaces conservatively.
///
/// This is tricky, because in C++ it is technically legal to override an
/// item defined in an inline namespace:
///
/// ```cpp
/// inline namespace foo {
///     using Bar = int;
/// }
/// using Bar = long;
/// ```
///
/// even though referencing `Bar` is then a compiler error.
///
/// We want to support this (arguably esoteric) use case without making
/// every other bindgen user pay a usability penalty for it.
///
/// Supporting it requires keeping every inline namespace around, which
/// makes bindgen harder to use: you can no longer reference, e.g.,
/// `std::string` without spelling out the proper inline namespace.
///
/// We could complicate the name-collision detection a lot, and generate a
/// `pub use inline_ns::*` (or similar) when no collision is found.
///
/// That's probably worth doing if this option turns out to be needed in a
/// lot of cases, to improve its usability, but the guess is that it won't
/// be too useful.
pub fn conservative_inline_namespaces(mut self) -> Builder {
    let opts = &mut self.options;
    opts.conservative_inline_namespaces = true;
    self
}
/// Control whether bindings for inline functions are generated.
///
/// Note that they will usually not work. However you can use
/// `-fkeep-inline-functions` or `-fno-inline-functions` if you are
/// responsible for compiling the library, to make them callable.
pub fn generate_inline_functions(mut self, doit: bool) -> Self {
    let opts = &mut self.options;
    opts.generate_inline_functions = doit;
    self
}
/// Skip generating bindings for functions.
pub fn ignore_functions(mut self) -> Builder {
    let config = &mut self.options.codegen_config;
    config.functions = false;
    self
}
/// Skip generating bindings for methods.
pub fn ignore_methods(mut self) -> Builder {
    let config = &mut self.options.codegen_config;
    config.methods = false;
    self
}
/// Avoid generating any unstable Rust, such as Rust unions, in the
/// generated bindings.
#[deprecated(note = "please use `rust_target` instead")]
pub fn unstable_rust(self, doit: bool) -> Self {
    // `true` targets nightly; otherwise pin to the newest stable release.
    self.rust_target(if doit {
        RustTarget::Nightly
    } else {
        LATEST_STABLE_RUST
    })
}
/// Use `core` instead of `std` in the generated bindings.
pub fn use_core(mut self) -> Builder {
    let opts = &mut self.options;
    opts.use_core = true;
    self
}
/// Use the given prefix for the raw C types instead of `::std::os::raw`.
pub fn ctypes_prefix<T: Into<String>>(mut self, prefix: T) -> Builder {
    let prefix = prefix.into();
    self.options.ctypes_prefix = Some(prefix);
    self
}
/// Allows configuring types in different situations, see the
/// [`ParseCallbacks`](./callbacks/trait.ParseCallbacks.html) documentation.
pub fn parse_callbacks(mut self, cb: Box<callbacks::ParseCallbacks>) -> Self {
    let opts = &mut self.options;
    opts.parse_callbacks = Some(cb);
    self
}
/// Choose what to generate using a
/// [`CodegenConfig`](./struct.CodegenConfig.html).
pub fn with_codegen_config(mut self, config: CodegenConfig) -> Self {
    let opts = &mut self.options;
    opts.codegen_config = config;
    self
}
/// Control whether the enum name is prepended to constant or bitfield
/// variants.
pub fn prepend_enum_name(mut self, doit: bool) -> Self {
    let opts = &mut self.options;
    opts.prepend_enum_name = doit;
    self
}
/// Control whether rustfmt is run over the generated bindings.
pub fn rustfmt_bindings(mut self, doit: bool) -> Self {
    let opts = &mut self.options;
    opts.rustfmt_bindings = doit;
    self
}
/// Set the absolute path to the rustfmt configuration file. With `None`,
/// the standard rustfmt options are used.
pub fn rustfmt_configuration_file(self, path: Option<PathBuf>) -> Self {
    // A custom config only makes sense when rustfmt runs at all.
    let mut builder = self.rustfmt_bindings(true);
    builder.options.rustfmt_configuration_file = path;
    builder
}
/// Generate the Rust bindings using the options built up thus far.
pub fn generate(mut self) -> Result<Bindings, ()> {
    // The last header becomes the primary input; every earlier one is
    // passed to clang via `-include` so it is still seen.
    self.options.input_header = self.input_headers.pop();
    let include_args = self.input_headers.drain(..).flat_map(|header| {
        iter::once("-include".into()).chain(iter::once(header))
    });
    self.options.clang_args.extend(include_args);
    // In-memory header contents become unsaved files for clang.
    let unsaved = self.input_header_contents.drain(..).map(
        |(name, contents)| clang::UnsavedFile::new(&name, &contents),
    );
    self.options.input_unsaved_files.extend(unsaved);
    Bindings::generate(self.options)
}
/// Preprocess and dump the input header files to disk.
///
/// This is useful when debugging bindgen, using C-Reduce, or when filing
/// issues. The resulting file will be named something like `__bindgen.i` or
/// `__bindgen.ii`.
///
/// Returns an `io::Error` if clang cannot be found, the wrapper file
/// cannot be written, or clang exits with a non-zero status.
pub fn dump_preprocessed_input(&self) -> io::Result<()> {
    let clang = clang_sys::support::Clang::find(None, &[]).ok_or_else(|| {
        io::Error::new(io::ErrorKind::Other, "Cannot find clang executable")
    })?;
    // The contents of a wrapper file that includes all the input header
    // files.
    let mut wrapper_contents = String::new();
    // Whether we are working with C or C++ inputs.
    let mut is_cpp = false;
    // For each input header, add `#include "$header"`.
    for header in &self.input_headers {
        is_cpp |= header.ends_with(".hpp");
        wrapper_contents.push_str("#include \"");
        wrapper_contents.push_str(header);
        wrapper_contents.push_str("\"\n");
    }
    // For each input header content, add a prefix line of `#line 0 "$name"`
    // followed by the contents.
    for &(ref name, ref contents) in &self.input_header_contents {
        is_cpp |= name.ends_with(".hpp");
        wrapper_contents.push_str("#line 0 \"");
        wrapper_contents.push_str(name);
        wrapper_contents.push_str("\"\n");
        wrapper_contents.push_str(contents);
    }
    // Also honor an explicit `-x c++` / `-x=c++` among the clang args.
    is_cpp |= self.options.clang_args.windows(2).any(|w| {
        w[0] == "-x=c++" || w[1] == "-x=c++" || w == &["-x", "c++"]
    });
    let wrapper_path = PathBuf::from(if is_cpp {
        "__bindgen.cpp"
    } else {
        "__bindgen.c"
    });
    {
        let mut wrapper_file = File::create(&wrapper_path)?;
        // Use `write_all`, not `write`: a short write would silently
        // truncate the wrapper file.
        wrapper_file.write_all(wrapper_contents.as_bytes())?;
    }
    let mut cmd = Command::new(&clang.path);
    cmd.arg("-save-temps")
        .arg("-E")
        .arg("-C")
        .arg("-c")
        .arg(&wrapper_path)
        .stdout(Stdio::piped());
    for a in &self.options.clang_args {
        cmd.arg(a);
    }
    let mut child = cmd.spawn()?;
    let mut preprocessed = child.stdout.take().unwrap();
    let mut file = File::create(if is_cpp {
        "__bindgen.ii"
    } else {
        "__bindgen.i"
    })?;
    io::copy(&mut preprocessed, &mut file)?;
    if child.wait()?.success() {
        Ok(())
    } else {
        Err(io::Error::new(
            io::ErrorKind::Other,
            "clang exited with non-zero status",
        ))
    }
}
/// Don't derive `PartialEq` for a given type. Regular
/// expressions are supported.
///
/// Generalized from `arg: String` to `T: AsRef<str>` for consistency with
/// the other regex-set builder methods; `String` callers still compile.
pub fn no_partialeq<T: AsRef<str>>(mut self, arg: T) -> Builder {
    self.options.no_partialeq_types.insert(arg);
    self
}
/// Don't derive `Copy` for a given type. Regular
/// expressions are supported.
///
/// Generalized from `arg: String` to `T: AsRef<str>` for consistency with
/// the other regex-set builder methods; `String` callers still compile.
pub fn no_copy<T: AsRef<str>>(mut self, arg: T) -> Self {
    self.options.no_copy_types.insert(arg);
    self
}
/// Don't derive `Hash` for a given type. Regular
/// expressions are supported.
///
/// Generalized from `arg: String` to `T: AsRef<str>` for consistency with
/// the other regex-set builder methods; `String` callers still compile.
pub fn no_hash<T: AsRef<str>>(mut self, arg: T) -> Builder {
    self.options.no_hash_types.insert(arg);
    self
}
}
/// Configuration options for generated bindings.
///
/// Built up by `Builder` and consumed by `Bindings::generate`.
#[derive(Debug)]
struct BindgenOptions {
/// The set of types that have been blacklisted and should not appear
/// anywhere in the generated code.
blacklisted_types: RegexSet,
/// The set of types that should be treated as opaque structures in the
/// generated code.
opaque_types: RegexSet,
/// The set of types that we should have bindings for in the generated
/// code.
///
/// This includes all types transitively reachable from any type in this
/// set. One might think of whitelisted types/vars/functions as GC roots,
/// and the generated Rust code as including everything that gets marked.
whitelisted_types: RegexSet,
/// Whitelisted functions. See docs for `whitelisted_types` for more.
whitelisted_functions: RegexSet,
/// Whitelisted variables. See docs for `whitelisted_types` for more.
whitelisted_vars: RegexSet,
/// The enum patterns to mark an enum as bitfield.
bitfield_enums: RegexSet,
/// The enum patterns to mark an enum as a Rust enum.
rustified_enums: RegexSet,
/// The enum patterns to mark an enum as a module of constants.
constified_enum_modules: RegexSet,
/// Whether we should generate builtins or not.
builtins: bool,
/// The set of libraries we should link in the generated Rust code.
links: Vec<(String, LinkType)>,
/// True if we should dump the Clang AST for debugging purposes.
emit_ast: bool,
/// True if we should dump our internal IR for debugging purposes.
emit_ir: bool,
/// Output graphviz dot file.
emit_ir_graphviz: Option<String>,
/// True if we should emulate C++ namespaces with Rust modules in the
/// generated bindings.
enable_cxx_namespaces: bool,
/// True if we should avoid mangling names with namespaces.
disable_name_namespacing: bool,
/// True if we should generate layout tests for generated structures.
layout_tests: bool,
/// True if we should implement the Debug trait for C/C++ structures and types
/// that do not support automatically deriving Debug.
impl_debug: bool,
/// True if we should implement the PartialEq trait for C/C++ structures and types
/// that do not support automatically deriving PartialEq.
impl_partialeq: bool,
/// True if we should derive Copy trait implementations for C/C++ structures
/// and types.
derive_copy: bool,
/// True if we should derive Debug trait implementations for C/C++ structures
/// and types.
derive_debug: bool,
/// True if we should derive Default trait implementations for C/C++ structures
/// and types.
derive_default: bool,
/// True if we should derive Hash trait implementations for C/C++ structures
/// and types.
derive_hash: bool,
/// True if we should derive PartialOrd trait implementations for C/C++ structures
/// and types.
derive_partialord: bool,
/// True if we should derive Ord trait implementations for C/C++ structures
/// and types.
derive_ord: bool,
/// True if we should derive PartialEq trait implementations for C/C++ structures
/// and types.
derive_partialeq: bool,
/// True if we should derive Eq trait implementations for C/C++ structures
/// and types.
derive_eq: bool,
/// True if we should avoid using libstd to use libcore instead.
use_core: bool,
/// An optional prefix for the "raw" types, like `c_int`, `c_void`...
ctypes_prefix: Option<String>,
/// Whether to time the bindgen phases.
time_phases: bool,
/// True if we should generate constant names that are **directly** under
/// namespaces.
namespaced_constants: bool,
/// True if we should use MSVC name mangling rules.
msvc_mangling: bool,
/// Whether we should convert float types to f32/f64 types.
convert_floats: bool,
/// The set of raw lines to prepend to the generated Rust code.
raw_lines: Vec<String>,
/// The set of arguments to pass straight through to Clang.
clang_args: Vec<String>,
/// The input header file.
input_header: Option<String>,
/// Unsaved files for input.
input_unsaved_files: Vec<clang::UnsavedFile>,
/// A user-provided visitor to allow customizing different kinds of
/// situations.
parse_callbacks: Option<Box<callbacks::ParseCallbacks>>,
/// Which kind of items should we generate? By default, we'll generate all
/// of them.
codegen_config: CodegenConfig,
/// Whether to treat inline namespaces conservatively.
///
/// See the builder method description for more details.
conservative_inline_namespaces: bool,
/// Whether to keep documentation comments in the generated output. See the
/// documentation for more details.
generate_comments: bool,
/// Whether to generate inline functions. Defaults to false.
generate_inline_functions: bool,
/// Whether to whitelist types recursively. Defaults to true.
whitelist_recursively: bool,
/// Instead of emitting 'use objc;' to files generated from objective c files,
/// generate '#[macro_use] extern crate objc;'
objc_extern_crate: bool,
/// Whether to use the clang-provided name mangling. This is true and
/// probably needed for C++ features.
///
/// However, some old libclang versions seem to return incorrect results in
/// some cases for non-mangled functions, see [1], so we allow disabling it.
///
/// [1]: https://github.com/rust-lang-nursery/rust-bindgen/issues/528
enable_mangling: bool,
/// Whether to prepend the enum name to bitfield or constant variants.
prepend_enum_name: bool,
/// Version of the Rust compiler to target
rust_target: RustTarget,
/// Features to enable, derived from `rust_target`
rust_features: RustFeatures,
/// Whether rustfmt should format the generated bindings.
rustfmt_bindings: bool,
/// The absolute path to the rustfmt configuration file, if None, the standard rustfmt
/// options are used.
rustfmt_configuration_file: Option<PathBuf>,
/// The set of types that we should not derive `PartialEq` for.
no_partialeq_types: RegexSet,
/// The set of types that we should not derive `Copy` for.
no_copy_types: RegexSet,
/// The set of types that we should not derive `Hash` for.
no_hash_types: RegexSet,
}
/// TODO(emilio): This is sort of a lie (see the error message that results
/// from removing this), but since we don't share references across panic
/// boundaries it is OK.
impl ::std::panic::UnwindSafe for BindgenOptions {}
impl BindgenOptions {
    /// Compile every `RegexSet` in the options so later membership checks
    /// are cheap.
    fn build(&mut self) {
        let sets = vec![
            &mut self.whitelisted_vars,
            &mut self.whitelisted_types,
            &mut self.whitelisted_functions,
            &mut self.blacklisted_types,
            &mut self.opaque_types,
            &mut self.bitfield_enums,
            &mut self.constified_enum_modules,
            &mut self.rustified_enums,
            &mut self.no_partialeq_types,
            &mut self.no_copy_types,
            &mut self.no_hash_types,
        ];
        for set in sets {
            set.build();
        }
    }
    /// Update the targeted Rust version.
    pub fn set_rust_target(&mut self, rust_target: RustTarget) {
        self.rust_target = rust_target;
        // `rust_features` must always track `rust_target`.
        self.rust_features = rust_target.into();
    }
    /// The features supported by the targeted Rust version.
    pub fn rust_features(&self) -> RustFeatures {
        self.rust_features
    }
}
impl Default for BindgenOptions {
fn default() -> BindgenOptions {
// `rust_features` is derived from `rust_target`; the two must stay in
// sync (see `set_rust_target`).
let rust_target = RustTarget::default();
BindgenOptions {
rust_target: rust_target,
rust_features: rust_target.into(),
blacklisted_types: Default::default(),
opaque_types: Default::default(),
whitelisted_types: Default::default(),
whitelisted_functions: Default::default(),
whitelisted_vars: Default::default(),
bitfield_enums: Default::default(),
rustified_enums: Default::default(),
constified_enum_modules: Default::default(),
builtins: false,
links: vec![],
emit_ast: false,
emit_ir: false,
emit_ir_graphviz: None,
layout_tests: true,
impl_debug: false,
impl_partialeq: false,
derive_copy: true,
derive_debug: true,
derive_default: false,
derive_hash: false,
derive_partialord: false,
derive_ord: false,
derive_partialeq: false,
derive_eq: false,
enable_cxx_namespaces: false,
disable_name_namespacing: false,
use_core: false,
ctypes_prefix: None,
namespaced_constants: true,
msvc_mangling: false,
convert_floats: true,
raw_lines: vec![],
clang_args: vec![],
input_header: None,
input_unsaved_files: vec![],
parse_callbacks: None,
// By default every kind of item is generated.
codegen_config: CodegenConfig::all(),
conservative_inline_namespaces: false,
generate_comments: true,
generate_inline_functions: false,
whitelist_recursively: true,
objc_extern_crate: false,
enable_mangling: true,
prepend_enum_name: true,
time_phases: false,
rustfmt_bindings: true,
rustfmt_configuration_file: None,
no_partialeq_types: Default::default(),
no_copy_types: Default::default(),
no_hash_types: Default::default(),
}
}
}
/// The linking type to use with a given library.
///
/// Set via `Builder::link`, `Builder::link_static` and
/// `Builder::link_framework`.
///
/// TODO: #104: This is ignored at the moment, but shouldn't be.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum LinkType {
/// Use shared library linking. This is the default.
Default,
/// Use static linking.
Static,
/// The library is an OSX framework.
Framework,
}
/// Load `libclang` at most once per process and keep it alive for the rest
/// of the program's lifetime.
fn ensure_libclang_is_loaded() {
if clang_sys::is_loaded() {
return;
}
// XXX (issue #350): Ensure that our dynamically loaded `libclang`
// doesn't get dropped prematurely, nor is loaded multiple times
// across different threads.
lazy_static! {
static ref LIBCLANG: Arc<clang_sys::SharedLibrary> = {
clang_sys::load().expect("Unable to find libclang");
clang_sys::get_library()
.expect("We just loaded libclang and it had better still be \
here!")
};
}
clang_sys::set_library(Some(LIBCLANG.clone()));
}
/// Generated Rust bindings.
#[derive(Debug)]
pub struct Bindings {
// Retained so `write` can prepend `raw_lines` and honor the rustfmt
// settings.
options: BindgenOptions,
// The generated bindings as a token stream.
module: quote::Tokens,
}
impl Bindings {
/// Generate bindings for the given options.
pub(crate) fn generate(
mut options: BindgenOptions,
) -> Result<Bindings, ()> {
ensure_libclang_is_loaded();
options.build();
// Filter out include paths and similar stuff, so we don't incorrectly
// promote them to `-isystem`.
let clang_args_for_clang_sys = {
let mut last_was_include_prefix = false;
options.clang_args.iter().filter(|arg| {
if last_was_include_prefix {
last_was_include_prefix = false;
return false;
}
let arg = &**arg;
// https://clang.llvm.org/docs/ClangCommandLineReference.html
// -isystem and -isystem-after are harmless.
if arg == "-I" || arg == "--include-directory" {
last_was_include_prefix = true;
return false;
}
if arg.starts_with("-I") || arg.starts_with("--include-directory=") {
return false;
}
true
}).cloned().collect::<Vec<_>>()
};
// TODO: Make this path fixup configurable?
if let Some(clang) = clang_sys::support::Clang::find(
None,
&clang_args_for_clang_sys,
)
{
// If --target is specified, assume caller knows what they're doing
// and don't mess with include paths for them
let has_target_arg = options
.clang_args
.iter()
.rposition(|arg| arg.starts_with("--target"))
.is_some();
if !has_target_arg {
// TODO: distinguish C and C++ paths? C++'s should be enough, I
// guess.
if let Some(cpp_search_paths) = clang.cpp_search_paths {
for path in cpp_search_paths.into_iter() {
if let Ok(path) = path.into_os_string().into_string() {
options.clang_args.push("-isystem".to_owned());
options.clang_args.push(path);
}
}
}
}
}
#[cfg(unix)]
fn can_read(perms: &std::fs::Permissions) -> bool {
use std::os::unix::fs::PermissionsExt;
perms.mode() & 0o444 > 0
}
#[cfg(not(unix))]
fn can_read(_: &std::fs::Permissions) -> bool {
true
}
if let Some(h) = options.input_header.as_ref() {
let md = std::fs::metadata(h).ok().unwrap();
if !md.is_file() {
eprintln!("error: '{}' is a folder", h);
return Err(());
}
if !can_read(&md.permissions()) {
eprintln!("error: insufficient permissions to read '{}'", h);
return Err(());
}
options.clang_args.push(h.clone())
}
for f in options.input_unsaved_files.iter() {
options.clang_args.push(f.name.to_str().unwrap().to_owned())
}
let time_phases = options.time_phases;
let mut context = BindgenContext::new(options);
{
let _t = time::Timer::new("parse")
.with_output(time_phases);
try!(parse(&mut context));
}
let (items, options) = codegen::codegen(context);
Ok(Bindings {
options: options,
module: quote! {
#( #items )*
}
})
}
/// Convert these bindings into source text (with raw lines prepended).
pub fn to_string(&self) -> String {
let mut bytes = vec![];
self.write(Box::new(&mut bytes) as Box<Write>)
.expect("writing to a vec cannot fail");
String::from_utf8(bytes)
.expect("we should only write bindings that are valid utf-8")
}
/// Write these bindings as source text to a file.
pub fn write_to_file<P: AsRef<Path>>(&self, path: P) -> io::Result<()> {
let file = OpenOptions::new()
.write(true)
.truncate(true)
.create(true)
.open(path.as_ref())?;
self.write(Box::new(file))?;
Ok(())
}
/// Write these bindings as source text to the given `Write`able.
pub fn write<'a>(&self, mut writer: Box<Write + 'a>) -> io::Result<()> {
writer.write(
"/* automatically generated by rust-bindgen */\n\n".as_bytes(),
)?;
for line in self.options.raw_lines.iter() {
writer.write(line.as_bytes())?;
writer.write("\n".as_bytes())?;
}
if !self.options.raw_lines.is_empty() {
writer.write("\n".as_bytes())?;
}
let bindings = self.module.as_str().to_string();
match self.rustfmt_generated_string(&bindings) {
Ok(rustfmt_bindings) => {
writer.write(rustfmt_bindings.as_bytes())?;
},
Err(err) => {
eprintln!("{:?}", err);
writer.write(bindings.as_str().as_bytes())?;
},
}
Ok(())
}
/// Checks if rustfmt_bindings is set and runs rustfmt on the string
fn rustfmt_generated_string<'a>(
&self,
source: &'a str,
) -> io::Result<Cow<'a, str>> {
let _t = time::Timer::new("rustfmt_generated_string")
.with_output(self.options.time_phases);
if !self.options.rustfmt_bindings {
return Ok(Cow::Borrowed(source));
}
let rustfmt = which::which("rustfmt")
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_owned()))?;
// Prefer using the `rustfmt-nightly` version of `rustmft`, if
// possible. It requires being run via `rustup run nightly ...`.
let mut cmd = if let Ok(rustup) = which::which("rustup") {
let mut cmd = Command::new(rustup);
cmd.args(&["run", "nightly", "rustfmt", "--"]);
cmd
} else {
Command::new(rustfmt)
};
cmd
.args(&["--write-mode=display"])
.stdin(Stdio::piped())
.stdout(Stdio::piped());
if let Some(path) = self.options
.rustfmt_configuration_file
.as_ref()
.and_then(|f| f.to_str())
{
cmd.args(&["--config-path", path]);
}
let mut child = cmd.spawn()?;
let mut child_stdin = child.stdin.take().unwrap();
let mut child_stdout = child.stdout.take().unwrap();
let source = source.to_owned();
// Write to stdin in a new thread, so that we can read from stdout on this
// thread. This keeps the child from blocking on writing to its stdout which
// might block us from writing to its stdin.
let stdin_handle = ::std::thread::spawn(move || {
let _ = child_stdin.write_all(source.as_bytes());
source
});
let mut output = vec![];
io::copy(&mut child_stdout, &mut output)?;
let status = child.wait()?;
let source = stdin_handle.join()
.expect("The thread writing to rustfmt's stdin doesn't do \
anything that could panic");
match String::from_utf8(output) {
Ok(bindings) => {
match status.code() {
Some(0) => Ok(Cow::Owned(bindings)),
Some(2) => Err(io::Error::new(
io::ErrorKind::Other,
"Rustfmt parsing errors.".to_string(),
)),
Some(3) => {
warn!("Rustfmt could not format some lines.");
Ok(Cow::Owned(bindings))
}
_ => Err(io::Error::new(
io::ErrorKind::Other,
"Internal rustfmt error".to_string(),
)),
}
},
_ => Ok(Cow::Owned(source))
}
}
}
/// Determines whether the given cursor is in any of the files matched by the
/// options.
/// Returns whether `cursor` should be kept: compiler builtins are skipped
/// unless the options ask for them.
fn filter_builtins(ctx: &BindgenContext, cursor: &clang::Cursor) -> bool {
    if ctx.options().builtins {
        true
    } else {
        !cursor.is_builtin()
    }
}
/// Parse one `Item` from the Clang cursor.
fn parse_one(
    ctx: &mut BindgenContext,
    cursor: clang::Cursor,
    parent: Option<ItemId>,
) -> clang_sys::CXChildVisitResult {
    use clang_sys::CXChildVisit_Continue;
    if filter_builtins(ctx, &cursor) {
        match Item::parse(cursor, parent, ctx) {
            // Parsed, or deliberately skipped: nothing more to do.
            Ok(..) | Err(ParseError::Continue) => {}
            // Recurse into the cursor's children.
            Err(ParseError::Recurse) => {
                cursor.visit(|child| parse_one(ctx, child, parent));
            }
        }
    }
    CXChildVisit_Continue
}
/// Parse the Clang AST into our `Item` internal representation.
///
/// Returns `Err(())` if clang reported any error-severity diagnostic.
fn parse(context: &mut BindgenContext) -> Result<(), ()> {
use clang_sys::*;
// Print every diagnostic clang produced; abort if any reached error
// severity.
let mut any_error = false;
for d in context.translation_unit().diags().iter() {
let msg = d.format();
let is_err = d.severity() >= CXDiagnostic_Error;
eprintln!("{}, err: {}", msg, is_err);
any_error |= is_err;
}
if any_error {
return Err(());
}
let cursor = context.translation_unit().cursor();
if context.options().emit_ast {
// Debugging aid: dump the non-builtin parts of the AST when asked.
fn dump_if_not_builtin(cur: &clang::Cursor) -> CXChildVisitResult {
if !cur.is_builtin() {
clang::ast_dump(&cur, 0)
} else {
CXChildVisit_Continue
}
}
cursor.visit(|cur| dump_if_not_builtin(&cur));
}
// Walk the whole translation unit, parsing items into the root module.
let root = context.root_module();
context.with_module(root, |context| {
cursor.visit(|cursor| parse_one(context, cursor, None))
});
// Parsing must leave the module stack balanced at the root.
assert!(
context.current_module() == context.root_module(),
"How did this happen?"
);
Ok(())
}
/// Extracted Clang version data
#[derive(Debug)]
pub struct ClangVersion {
/// Major and minor semver numbers, if parsing was successful
pub parsed: Option<(u32, u32)>,
/// full version string
pub full: String,
}
/// Get the major and the minor semver numbers of Clang's version.
pub fn clang_version() -> ClangVersion {
    if !clang_sys::is_loaded() {
        // TODO(emilio): Return meaningful error (breaking).
        clang_sys::load().expect("Unable to find libclang");
    }
    let raw_v: String = clang::extract_clang_version();
    // The version is the third whitespace-separated token, e.g.
    // "clang version 4.0.0 (...)"; split it on '.' and parse the first
    // two pieces.
    let parsed = raw_v.split_whitespace().nth(2).and_then(|version| {
        let mut pieces = version.split('.');
        let major = pieces.next().map(|p| p.parse::<u32>());
        let minor = pieces.next().map(|p| p.parse::<u32>());
        match (major, minor) {
            (Some(Ok(major)), Some(Ok(minor))) => Some((major, minor)),
            _ => None,
        }
    });
    ClangVersion {
        parsed: parsed,
        full: raw_v.clone(),
    }
}
/// Test command_line_flag function.
#[test]
fn commandline_flag_unit_test_function() {
//Test 1: a default builder must report its baseline flags.
let bindings = ::builder();
let command_line_flags = bindings.command_line_flags();
let test_cases = vec![
"--no-derive-default",
"--generate",
"function,types,vars,methods,constructors,destructors",
].iter()
.map(|&x| x.into())
.collect::<Vec<String>>();
assert!(test_cases.iter().all(
|ref x| command_line_flags.contains(x),
));
//Test 2: header and whitelist options must round-trip into flags.
let bindings = ::builder()
.header("input_header")
.whitelist_type("Distinct_Type")
.whitelist_function("safe_function");
let command_line_flags = bindings.command_line_flags();
let test_cases = vec![
"input_header",
"--no-derive-default",
"--generate",
"function,types,vars,methods,constructors,destructors",
"--whitelist-type",
"Distinct_Type",
"--whitelist-function",
"safe_function",
].iter()
.map(|&x| x.into())
.collect::<Vec<String>>();
println!("{:?}", command_line_flags);
assert!(test_cases.iter().all(
|ref x| command_line_flags.contains(x),
));
}
// Don't unwrap header metadata
//! Generate Rust bindings for C and C++ libraries.
//!
//! Provide a C/C++ header file, receive Rust FFI code to call into C/C++
//! functions and use types defined in the header.
//!
//! See the [`Builder`](./struct.Builder.html) struct for usage.
#![deny(missing_docs)]
#![deny(warnings)]
#![deny(unused_extern_crates)]
// To avoid rather annoying warnings when matching with CXCursor_xxx as a
// constant.
#![allow(non_upper_case_globals)]
// `quote!` nests quite deeply.
#![recursion_limit="128"]
extern crate cexpr;
#[macro_use]
#[allow(unused_extern_crates)]
extern crate cfg_if;
extern crate clang_sys;
#[macro_use]
extern crate lazy_static;
extern crate peeking_take_while;
#[macro_use]
extern crate quote;
extern crate regex;
extern crate which;
#[cfg(feature = "logging")]
#[macro_use]
extern crate log;
#[cfg(not(feature = "logging"))]
#[macro_use]
mod log_stubs;
#[macro_use]
mod extra_assertions;
// A macro to declare an internal module for which we *must* provide
// documentation for. If we are building with the "testing_only_docs" feature,
// then the module is declared public, and our `#![deny(missing_docs)]` pragma
// applies to it. This feature is used in CI, so we won't let anything slip by
// undocumented. Normal builds, however, will leave the module private, so that
// we don't expose internals to library consumers.
macro_rules! doc_mod {
($m:ident, $doc_mod_name:ident) => {
cfg_if! {
if #[cfg(feature = "testing_only_docs")] {
pub mod $doc_mod_name {
//! Autogenerated documentation module.
pub use super::$m::*;
}
} else {
// Normal builds keep the module private: emit nothing.
}
}
};
}
mod clang;
mod codegen;
mod features;
mod ir;
mod parse;
mod regex_set;
mod time;
pub mod callbacks;
doc_mod!(clang, clang_docs);
doc_mod!(features, features_docs);
doc_mod!(ir, ir_docs);
doc_mod!(parse, parse_docs);
doc_mod!(regex_set, regex_set_docs);
pub use features::{LATEST_STABLE_RUST, RUST_TARGET_STRINGS, RustTarget};
use features::RustFeatures;
use ir::context::{BindgenContext, ItemId};
use ir::item::Item;
use parse::{ClangItemParser, ParseError};
use regex_set::RegexSet;
use std::borrow::Cow;
use std::fs::{File, OpenOptions};
use std::io::{self, Write};
use std::iter;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::sync::Arc;
/// A type used to indicate which kind of items do we have to generate.
///
/// The default (via `Default`) is to generate all kinds of items; see the
/// `all` and `nothing` constructors.
///
/// TODO(emilio): Use `bitflags!`
#[derive(Debug, Clone)]
pub struct CodegenConfig {
    /// Whether to generate functions.
    pub functions: bool,
    /// Whether to generate types.
    pub types: bool,
    /// Whether to generate constants.
    pub vars: bool,
    /// Whether to generate methods.
    pub methods: bool,
    /// Whether to generate constructors.
    pub constructors: bool,
    /// Whether to generate destructors.
    pub destructors: bool,
}
impl CodegenConfig {
    /// Build a configuration that generates every supported kind of item.
    pub fn all() -> Self {
        CodegenConfig {
            destructors: true,
            constructors: true,
            methods: true,
            vars: true,
            types: true,
            functions: true,
        }
    }

    /// Build a configuration that generates no items at all.
    pub fn nothing() -> Self {
        CodegenConfig {
            destructors: false,
            constructors: false,
            methods: false,
            vars: false,
            types: false,
            functions: false,
        }
    }
}
impl Default for CodegenConfig {
fn default() -> Self {
CodegenConfig::all()
}
}
/// Configure and generate Rust bindings for a C/C++ header.
///
/// This is the main entry point to the library.
///
/// ```ignore
/// use bindgen::builder;
///
/// // Configure and generate bindings.
/// let bindings = try!(builder().header("path/to/input/header")
///     .whitelist_type("SomeCoolClass")
///     .whitelist_function("do_some_cool_thing")
///     .generate());
///
/// // Write the generated bindings to an output file.
/// try!(bindings.write_to_file("path/to/output.rs"));
/// ```
#[derive(Debug, Default)]
pub struct Builder {
    // All non-header configuration accumulated by the builder methods.
    options: BindgenOptions,
    // Input header paths, in the order they were added. The *last* one is
    // treated as the primary translation unit by `generate`.
    input_headers: Vec<String>,
    // Tuples of unsaved file contents of the form (name, contents).
    input_header_contents: Vec<(String, String)>,
}
/// Construct a new [`Builder`](./struct.Builder.html).
pub fn builder() -> Builder {
Default::default()
}
impl Builder {
/// Generates the command line flags use for creating `Builder`.
pub fn command_line_flags(&self) -> Vec<String> {
let mut output_vector: Vec<String> = Vec::new();
if let Some(header) = self.input_headers.last().cloned() {
// Positional argument 'header'
output_vector.push(header);
}
output_vector.push(self.options.rust_target.into());
self.options
.bitfield_enums
.get_items()
.iter()
.map(|item| {
output_vector.push("--bitfield-enum".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
self.options
.rustified_enums
.get_items()
.iter()
.map(|item| {
output_vector.push("--rustified-enum".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
self.options
.constified_enum_modules
.get_items()
.iter()
.map(|item| {
output_vector.push("--constified-enum-module".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
self.options
.blacklisted_types
.get_items()
.iter()
.map(|item| {
output_vector.push("--blacklist-type".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
if !self.options.layout_tests {
output_vector.push("--no-layout-tests".into());
}
if self.options.impl_debug {
output_vector.push("--impl-debug".into());
}
if self.options.impl_partialeq {
output_vector.push("--impl-partialeq".into());
}
if !self.options.derive_copy {
output_vector.push("--no-derive-copy".into());
}
if !self.options.derive_debug {
output_vector.push("--no-derive-debug".into());
}
if !self.options.derive_default {
output_vector.push("--no-derive-default".into());
} else {
output_vector.push("--with-derive-default".into());
}
if self.options.derive_hash {
output_vector.push("--with-derive-hash".into());
}
if self.options.derive_partialord {
output_vector.push("--with-derive-partialord".into());
}
if self.options.derive_ord {
output_vector.push("--with-derive-ord".into());
}
if self.options.derive_partialeq {
output_vector.push("--with-derive-partialeq".into());
}
if self.options.derive_eq {
output_vector.push("--with-derive-eq".into());
}
if self.options.time_phases {
output_vector.push("--time-phases".into());
}
if !self.options.generate_comments {
output_vector.push("--no-doc-comments".into());
}
if !self.options.whitelist_recursively {
output_vector.push("--no-recursive-whitelist".into());
}
if self.options.objc_extern_crate {
output_vector.push("--objc-extern-crate".into());
}
if self.options.builtins {
output_vector.push("--builtins".into());
}
if let Some(ref prefix) = self.options.ctypes_prefix {
output_vector.push("--ctypes-prefix".into());
output_vector.push(prefix.clone());
}
if self.options.emit_ast {
output_vector.push("--emit-clang-ast".into());
}
if self.options.emit_ir {
output_vector.push("--emit-ir".into());
}
if let Some(ref graph) = self.options.emit_ir_graphviz {
output_vector.push("--emit-ir-graphviz".into());
output_vector.push(graph.clone())
}
if self.options.enable_cxx_namespaces {
output_vector.push("--enable-cxx-namespaces".into());
}
if self.options.disable_name_namespacing {
output_vector.push("--disable-name-namespacing".into());
}
self.options
.links
.iter()
.map(|&(ref item, _)| {
output_vector.push("--framework".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
if !self.options.codegen_config.functions {
output_vector.push("--ignore-functions".into());
}
output_vector.push("--generate".into());
//Temporary placeholder for below 4 options
let mut options: Vec<String> = Vec::new();
if self.options.codegen_config.functions {
options.push("function".into());
}
if self.options.codegen_config.types {
options.push("types".into());
}
if self.options.codegen_config.vars {
options.push("vars".into());
}
if self.options.codegen_config.methods {
options.push("methods".into());
}
if self.options.codegen_config.constructors {
options.push("constructors".into());
}
if self.options.codegen_config.destructors {
options.push("destructors".into());
}
output_vector.push(options.join(","));
if !self.options.codegen_config.methods {
output_vector.push("--ignore-methods".into());
}
self.options
.links
.iter()
.map(|&(ref item, _)| {
output_vector.push("--clang-args".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
if !self.options.convert_floats {
output_vector.push("--no-convert-floats".into());
}
if !self.options.prepend_enum_name {
output_vector.push("--no-prepend-enum-name".into());
}
self.options
.opaque_types
.get_items()
.iter()
.map(|item| {
output_vector.push("--opaque-type".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
self.options
.raw_lines
.iter()
.map(|item| {
output_vector.push("--raw-line".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
self.options
.links
.iter()
.map(|&(ref item, _)| {
output_vector.push("--static".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
if self.options.use_core {
output_vector.push("--use-core".into());
}
if self.options.conservative_inline_namespaces {
output_vector.push("--conservative-inline-namespaces".into());
}
self.options
.whitelisted_functions
.get_items()
.iter()
.map(|item| {
output_vector.push("--whitelist-function".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
self.options
.whitelisted_types
.get_items()
.iter()
.map(|item| {
output_vector.push("--whitelist-type".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
self.options
.whitelisted_vars
.get_items()
.iter()
.map(|item| {
output_vector.push("--whitelist-var".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
output_vector.push("--".into());
if !self.options.clang_args.is_empty() {
output_vector.extend(self.options.clang_args.iter().cloned());
}
if self.input_headers.len() > 1 {
output_vector.extend(
self.input_headers[..self.input_headers.len() - 1]
.iter()
.cloned(),
);
}
if !self.options.rustfmt_bindings {
output_vector.push("--no-rustfmt-bindings".into());
}
if let Some(path) = self.options
.rustfmt_configuration_file
.as_ref()
.and_then(|f| f.to_str())
{
output_vector.push("--rustfmt-configuration-file".into());
output_vector.push(path.into());
}
self.options
.no_partialeq_types
.get_items()
.iter()
.map(|item| {
output_vector.push("--no-partialeq".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
self.options
.no_copy_types
.get_items()
.iter()
.map(|item| {
output_vector.push("--no-copy".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
self.options
.no_hash_types
.get_items()
.iter()
.map(|item| {
output_vector.push("--no-hash".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
output_vector
}
/// Add an input C/C++ header to generate bindings for.
///
/// This can be used to generate bindings to a single header:
///
/// ```ignore
/// let bindings = bindgen::Builder::default()
/// .header("input.h")
/// .generate()
/// .unwrap();
/// ```
///
/// Or you can invoke it multiple times to generate bindings to multiple
/// headers:
///
/// ```ignore
/// let bindings = bindgen::Builder::default()
/// .header("first.h")
/// .header("second.h")
/// .header("third.h")
/// .generate()
/// .unwrap();
/// ```
pub fn header<T: Into<String>>(mut self, header: T) -> Builder {
self.input_headers.push(header.into());
self
}
/// Add `contents` as an input C/C++ header named `name`.
///
/// The file `name` will be added to the clang arguments.
pub fn header_contents(mut self, name: &str, contents: &str) -> Builder {
self.input_header_contents.push(
(name.into(), contents.into()),
);
self
}
/// Specify the rust target
///
/// The default is the latest stable Rust version
pub fn rust_target(mut self, rust_target: RustTarget) -> Self {
self.options.set_rust_target(rust_target);
self
}
/// Set the output graphviz file.
pub fn emit_ir_graphviz<T: Into<String>>(mut self, path: T) -> Builder {
let path = path.into();
self.options.emit_ir_graphviz = Some(path);
self
}
/// Whether the generated bindings should contain documentation comments or
/// not.
///
/// This ideally will always be true, but it may need to be false until we
/// implement some processing on comments to work around issues as described
/// in:
///
/// https://github.com/rust-lang-nursery/rust-bindgen/issues/426
pub fn generate_comments(mut self, doit: bool) -> Self {
self.options.generate_comments = doit;
self
}
    /// Whether to whitelist recursively or not. Defaults to true.
    ///
    /// Given that we have explicitly whitelisted the "initiate_dance_party"
    /// function in this C header:
    ///
    /// ```c
    /// typedef struct MoonBoots {
    ///     int bouncy_level;
    /// } MoonBoots;
    ///
    /// void initiate_dance_party(MoonBoots* boots);
    /// ```
    ///
    /// We would normally generate bindings to both the `initiate_dance_party`
    /// function and the `MoonBoots` struct that it transitively references. By
    /// configuring with `whitelist_recursively(false)`, `bindgen` will not emit
    /// bindings for anything except the explicitly whitelisted items, and there
    /// would be no emitted struct definition for `MoonBoots`. However, the
    /// `initiate_dance_party` function would still reference `MoonBoots`!
    ///
    /// **Disabling this feature will almost certainly cause `bindgen` to emit
    /// bindings that will not compile!** If you disable this feature, then it
    /// is *your* responsibility to provide definitions for every type that is
    /// referenced from an explicitly whitelisted item. One way to provide the
    /// definitions is by using the [`Builder::raw_line`](#method.raw_line)
    /// method, another would be to define them in Rust and then `include!(...)`
    /// the bindings immediately afterwards.
    pub fn whitelist_recursively(mut self, doit: bool) -> Self {
        self.options.whitelist_recursively = doit;
        self
    }
/// Generate `#[macro_use] extern crate objc;` instead of `use objc;`
/// in the prologue of the files generated from objective-c files
pub fn objc_extern_crate(mut self, doit: bool) -> Self {
self.options.objc_extern_crate = doit;
self
}
/// Whether to use the clang-provided name mangling. This is true by default
/// and probably needed for C++ features.
///
/// However, some old libclang versions seem to return incorrect results in
/// some cases for non-mangled functions, see [1], so we allow disabling it.
///
/// [1]: https://github.com/rust-lang-nursery/rust-bindgen/issues/528
pub fn trust_clang_mangling(mut self, doit: bool) -> Self {
self.options.enable_mangling = doit;
self
}
    /// Hide the given type from the generated bindings. Regular expressions are
    /// supported.
    ///
    /// Deprecated: use [`blacklist_type`](#method.blacklist_type) instead.
    #[deprecated = "Use blacklist_type instead"]
    pub fn hide_type<T: AsRef<str>>(self, arg: T) -> Builder {
        self.blacklist_type(arg)
    }
/// Hide the given type from the generated bindings. Regular expressions are
/// supported.
pub fn blacklist_type<T: AsRef<str>>(mut self, arg: T) -> Builder {
self.options.blacklisted_types.insert(arg);
self
}
/// Treat the given type as opaque in the generated bindings. Regular
/// expressions are supported.
pub fn opaque_type<T: AsRef<str>>(mut self, arg: T) -> Builder {
self.options.opaque_types.insert(arg);
self
}
    /// Whitelist the given type so that it (and all types that it transitively
    /// refers to) appears in the generated bindings. Regular expressions are
    /// supported.
    ///
    /// Deprecated: use [`whitelist_type`](#method.whitelist_type) instead.
    #[deprecated = "use whitelist_type instead"]
    pub fn whitelisted_type<T: AsRef<str>>(self, arg: T) -> Builder {
        self.whitelist_type(arg)
    }
/// Whitelist the given type so that it (and all types that it transitively
/// refers to) appears in the generated bindings. Regular expressions are
/// supported.
pub fn whitelist_type<T: AsRef<str>>(mut self, arg: T) -> Builder {
self.options.whitelisted_types.insert(arg);
self
}
/// Whitelist the given function so that it (and all types that it
/// transitively refers to) appears in the generated bindings. Regular
/// expressions are supported.
pub fn whitelist_function<T: AsRef<str>>(mut self, arg: T) -> Builder {
self.options.whitelisted_functions.insert(arg);
self
}
    /// Whitelist the given function. Regular expressions are supported.
    ///
    /// Deprecated: use [`whitelist_function`](#method.whitelist_function)
    /// instead.
    #[deprecated = "use whitelist_function instead"]
    pub fn whitelisted_function<T: AsRef<str>>(self, arg: T) -> Builder {
        self.whitelist_function(arg)
    }
/// Whitelist the given variable so that it (and all types that it
/// transitively refers to) appears in the generated bindings. Regular
/// expressions are supported.
pub fn whitelist_var<T: AsRef<str>>(mut self, arg: T) -> Builder {
self.options.whitelisted_vars.insert(arg);
self
}
    /// Whitelist the given variable. Regular expressions are supported.
    ///
    /// Deprecated: use [`whitelist_var`](#method.whitelist_var) instead.
    #[deprecated = "use whitelist_var instead"]
    pub fn whitelisted_var<T: AsRef<str>>(self, arg: T) -> Builder {
        self.whitelist_var(arg)
    }
/// Mark the given enum (or set of enums, if using a pattern) as being
/// bitfield-like. Regular expressions are supported.
///
/// This makes bindgen generate a type that isn't a rust `enum`. Regular
/// expressions are supported.
pub fn bitfield_enum<T: AsRef<str>>(mut self, arg: T) -> Builder {
self.options.bitfield_enums.insert(arg);
self
}
/// Mark the given enum (or set of enums, if using a pattern) as a Rust
/// enum.
///
/// This makes bindgen generate enums instead of constants. Regular
/// expressions are supported.
///
/// **Use this with caution.** You should not be using Rust enums unless
/// you have complete control of the C/C++ code that you're binding to.
/// Take a look at https://github.com/rust-lang/rust/issues/36927 for
/// more information.
pub fn rustified_enum<T: AsRef<str>>(mut self, arg: T) -> Builder {
self.options.rustified_enums.insert(arg);
self
}
/// Mark the given enum (or set of enums, if using a pattern) as a set of
/// constants that should be put into a module.
///
/// This makes bindgen generate modules containing constants instead of
/// just constants. Regular expressions are supported.
pub fn constified_enum_module<T: AsRef<str>>(mut self, arg: T) -> Builder {
self.options.constified_enum_modules.insert(arg);
self
}
/// Add a string to prepend to the generated bindings. The string is passed
/// through without any modification.
pub fn raw_line<T: Into<String>>(mut self, arg: T) -> Builder {
self.options.raw_lines.push(arg.into());
self
}
/// Add an argument to be passed straight through to clang.
pub fn clang_arg<T: Into<String>>(mut self, arg: T) -> Builder {
self.options.clang_args.push(arg.into());
self
}
/// Add arguments to be passed straight through to clang.
pub fn clang_args<I>(mut self, iter: I) -> Builder
where
I: IntoIterator,
I::Item: AsRef<str>,
{
for arg in iter {
self = self.clang_arg(arg.as_ref())
}
self
}
/// Make the generated bindings link the given shared library.
pub fn link<T: Into<String>>(mut self, library: T) -> Builder {
self.options.links.push((library.into(), LinkType::Default));
self
}
/// Make the generated bindings link the given static library.
pub fn link_static<T: Into<String>>(mut self, library: T) -> Builder {
self.options.links.push((library.into(), LinkType::Static));
self
}
/// Make the generated bindings link the given framework.
pub fn link_framework<T: Into<String>>(mut self, library: T) -> Builder {
self.options.links.push(
(library.into(), LinkType::Framework),
);
self
}
/// Emit bindings for builtin definitions (for example `__builtin_va_list`)
/// in the generated Rust.
pub fn emit_builtins(mut self) -> Builder {
self.options.builtins = true;
self
}
/// Avoid converting floats to `f32`/`f64` by default.
pub fn no_convert_floats(mut self) -> Self {
self.options.convert_floats = false;
self
}
/// Set whether layout tests should be generated.
pub fn layout_tests(mut self, doit: bool) -> Self {
self.options.layout_tests = doit;
self
}
/// Set whether `Debug` should be implemented, if it can not be derived automatically.
pub fn impl_debug(mut self, doit: bool) -> Self {
self.options.impl_debug = doit;
self
}
/// Set whether `PartialEq` should be implemented, if it can not be derived automatically.
pub fn impl_partialeq(mut self, doit: bool) -> Self {
self.options.impl_partialeq = doit;
self
}
/// Set whether `Copy` should be derived by default.
pub fn derive_copy(mut self, doit: bool) -> Self {
self.options.derive_copy = doit;
self
}
/// Set whether `Debug` should be derived by default.
pub fn derive_debug(mut self, doit: bool) -> Self {
self.options.derive_debug = doit;
self
}
/// Set whether `Default` should be derived by default.
pub fn derive_default(mut self, doit: bool) -> Self {
self.options.derive_default = doit;
self
}
/// Set whether `Hash` should be derived by default.
pub fn derive_hash(mut self, doit: bool) -> Self {
self.options.derive_hash = doit;
self
}
/// Set whether `PartialOrd` should be derived by default.
/// If we don't compute partialord, we also cannot compute
/// ord. Set the derive_ord to `false` when doit is `false`.
pub fn derive_partialord(mut self, doit: bool) -> Self {
self.options.derive_partialord = doit;
if !doit {
self.options.derive_ord = false;
}
self
}
/// Set whether `Ord` should be derived by default.
/// We can't compute `Ord` without computing `PartialOrd`,
/// so we set the same option to derive_partialord.
pub fn derive_ord(mut self, doit: bool) -> Self {
self.options.derive_ord = doit;
self.options.derive_partialord = doit;
self
}
/// Set whether `PartialEq` should be derived by default.
///
/// If we don't derive `PartialEq`, we also cannot derive `Eq`, so deriving
/// `Eq` is also disabled when `doit` is `false`.
pub fn derive_partialeq(mut self, doit: bool) -> Self {
self.options.derive_partialeq = doit;
if !doit {
self.options.derive_eq = false;
}
self
}
/// Set whether `Eq` should be derived by default.
///
/// We can't derive `Eq` without also deriving `PartialEq`, so we also
/// enable deriving `PartialEq` when `doit` is `true`.
pub fn derive_eq(mut self, doit: bool) -> Self {
self.options.derive_eq = doit;
if doit {
self.options.derive_partialeq = doit;
}
self
}
/// Set whether or not to time bindgen phases, and print information to
/// stderr.
pub fn time_phases(mut self, doit: bool) -> Self {
self.options.time_phases = doit;
self
}
/// Emit Clang AST.
pub fn emit_clang_ast(mut self) -> Builder {
self.options.emit_ast = true;
self
}
/// Emit IR.
pub fn emit_ir(mut self) -> Builder {
self.options.emit_ir = true;
self
}
/// Enable C++ namespaces.
pub fn enable_cxx_namespaces(mut self) -> Builder {
self.options.enable_cxx_namespaces = true;
self
}
/// Disable name auto-namespacing.
///
/// By default, bindgen mangles names like `foo::bar::Baz` to look like
/// `foo_bar_Baz` instead of just `Baz`.
///
/// This method disables that behavior.
///
/// Note that this intentionally does not change the names used for
/// whitelisting and blacklisting, which should still be mangled with the
/// namespaces.
///
/// Note, also, that this option may cause bindgen to generate duplicate
/// names.
pub fn disable_name_namespacing(mut self) -> Builder {
self.options.disable_name_namespacing = true;
self
}
    /// Treat inline namespaces conservatively.
    ///
    /// This is tricky, because in C++ it is technically legal to override an
    /// item defined in an inline namespace:
    ///
    /// ```cpp
    /// inline namespace foo {
    ///     using Bar = int;
    /// }
    /// using Bar = long;
    /// ```
    ///
    /// Even though referencing `Bar` is a compiler error.
    ///
    /// We want to support this (arguably esoteric) use case, but we don't want
    /// to make the rest of bindgen users pay a usability penalty for that.
    ///
    /// To support this, we need to keep all the inline namespaces around, but
    /// then bindgen usage is a bit more difficult, because you cannot
    /// reference, e.g., `std::string` (you'd need to use the proper inline
    /// namespace).
    ///
    /// We could complicate a lot of the logic to detect name collisions, and if
    /// not detected generate a `pub use inline_ns::*` or something like that.
    ///
    /// That's probably something we can do if we see this option is needed in a
    /// lot of cases, to improve its usability, but my guess is that this is
    /// not going to be too useful.
    pub fn conservative_inline_namespaces(mut self) -> Builder {
        self.options.conservative_inline_namespaces = true;
        self
    }
/// Whether inline functions should be generated or not.
///
/// Note that they will usually not work. However you can use
/// `-fkeep-inline-functions` or `-fno-inline-functions` if you are
/// responsible of compiling the library to make them callable.
pub fn generate_inline_functions(mut self, doit: bool) -> Self {
self.options.generate_inline_functions = doit;
self
}
/// Ignore functions.
pub fn ignore_functions(mut self) -> Builder {
self.options.codegen_config.functions = false;
self
}
/// Ignore methods.
pub fn ignore_methods(mut self) -> Builder {
self.options.codegen_config.methods = false;
self
}
    /// Avoid generating any unstable Rust, such as Rust unions, in the generated bindings.
    #[deprecated(note = "please use `rust_target` instead")]
    pub fn unstable_rust(self, doit: bool) -> Self {
        // `true` maps to the nightly target; `false` to the latest stable.
        let rust_target = if doit {
            RustTarget::Nightly
        } else {
            LATEST_STABLE_RUST
        };
        self.rust_target(rust_target)
    }
/// Use core instead of libstd in the generated bindings.
pub fn use_core(mut self) -> Builder {
self.options.use_core = true;
self
}
/// Use the given prefix for the raw types instead of `::std::os::raw`.
pub fn ctypes_prefix<T: Into<String>>(mut self, prefix: T) -> Builder {
self.options.ctypes_prefix = Some(prefix.into());
self
}
/// Allows configuring types in different situations, see the
/// [`ParseCallbacks`](./callbacks/trait.ParseCallbacks.html) documentation.
pub fn parse_callbacks(
mut self,
cb: Box<callbacks::ParseCallbacks>,
) -> Self {
self.options.parse_callbacks = Some(cb);
self
}
/// Choose what to generate using a
/// [`CodegenConfig`](./struct.CodegenConfig.html).
pub fn with_codegen_config(mut self, config: CodegenConfig) -> Self {
self.options.codegen_config = config;
self
}
/// Prepend the enum name to constant or bitfield variants.
pub fn prepend_enum_name(mut self, doit: bool) -> Self {
self.options.prepend_enum_name = doit;
self
}
/// Set whether rustfmt should format the generated bindings.
pub fn rustfmt_bindings(mut self, doit: bool) -> Self {
self.options.rustfmt_bindings = doit;
self
}
/// Set the absolute path to the rustfmt configuration file, if None, the standard rustfmt
/// options are used.
pub fn rustfmt_configuration_file(mut self, path: Option<PathBuf>) -> Self {
self = self.rustfmt_bindings(true);
self.options.rustfmt_configuration_file = path;
self
}
    /// Generate the Rust bindings using the options built up thus far.
    pub fn generate(mut self) -> Result<Bindings, ()> {
        // The most recently added header becomes the primary translation
        // unit; every earlier header is force-included via `-include`.
        self.options.input_header = self.input_headers.pop();
        self.options.clang_args.extend(
            self.input_headers
                .drain(..)
                .flat_map(|header| {
                    iter::once("-include".into()).chain(iter::once(header))
                }),
        );
        // Hand the in-memory header contents to clang as "unsaved files".
        self.options.input_unsaved_files.extend(
            self.input_header_contents.drain(..).map(|(name, contents)| {
                clang::UnsavedFile::new(&name, &contents)
            }),
        );
        Bindings::generate(self.options)
    }
/// Preprocess and dump the input header files to disk.
///
/// This is useful when debugging bindgen, using C-Reduce, or when filing
/// issues. The resulting file will be named something like `__bindgen.i` or
/// `__bindgen.ii`
pub fn dump_preprocessed_input(&self) -> io::Result<()> {
let clang = clang_sys::support::Clang::find(None, &[]).ok_or_else(|| {
io::Error::new(io::ErrorKind::Other, "Cannot find clang executable")
})?;
// The contents of a wrapper file that includes all the input header
// files.
let mut wrapper_contents = String::new();
// Whether we are working with C or C++ inputs.
let mut is_cpp = false;
// For each input header, add `#include "$header"`.
for header in &self.input_headers {
is_cpp |= header.ends_with(".hpp");
wrapper_contents.push_str("#include \"");
wrapper_contents.push_str(header);
wrapper_contents.push_str("\"\n");
}
// For each input header content, add a prefix line of `#line 0 "$name"`
// followed by the contents.
for &(ref name, ref contents) in &self.input_header_contents {
is_cpp |= name.ends_with(".hpp");
wrapper_contents.push_str("#line 0 \"");
wrapper_contents.push_str(name);
wrapper_contents.push_str("\"\n");
wrapper_contents.push_str(contents);
}
is_cpp |= self.options.clang_args.windows(2).any(|w| {
w[0] == "-x=c++" || w[1] == "-x=c++" || w == &["-x", "c++"]
});
let wrapper_path = PathBuf::from(if is_cpp {
"__bindgen.cpp"
} else {
"__bindgen.c"
});
{
let mut wrapper_file = File::create(&wrapper_path)?;
wrapper_file.write(wrapper_contents.as_bytes())?;
}
let mut cmd = Command::new(&clang.path);
cmd.arg("-save-temps")
.arg("-E")
.arg("-C")
.arg("-c")
.arg(&wrapper_path)
.stdout(Stdio::piped());
for a in &self.options.clang_args {
cmd.arg(a);
}
let mut child = cmd.spawn()?;
let mut preprocessed = child.stdout.take().unwrap();
let mut file = File::create(if is_cpp {
"__bindgen.ii"
} else {
"__bindgen.i"
})?;
io::copy(&mut preprocessed, &mut file)?;
if child.wait()?.success() {
Ok(())
} else {
Err(io::Error::new(
io::ErrorKind::Other,
"clang exited with non-zero status",
))
}
}
/// Don't derive `PartialEq` for a given type. Regular
/// expressions are supported.
pub fn no_partialeq(mut self, arg: String) -> Builder {
self.options.no_partialeq_types.insert(arg);
self
}
/// Don't derive `Copy` for a given type. Regular
/// expressions are supported.
pub fn no_copy(mut self, arg: String) -> Self {
self.options.no_copy_types.insert(arg);
self
}
/// Don't derive `Hash` for a given type. Regular
/// expressions are supported.
pub fn no_hash(mut self, arg: String) -> Builder {
self.options.no_hash_types.insert(arg);
self
}
}
/// Configuration options for generated bindings.
#[derive(Debug)]
struct BindgenOptions {
/// The set of types that have been blacklisted and should not appear
/// anywhere in the generated code.
blacklisted_types: RegexSet,
/// The set of types that should be treated as opaque structures in the
/// generated code.
opaque_types: RegexSet,
/// The set of types that we should have bindings for in the generated
/// code.
///
/// This includes all types transitively reachable from any type in this
/// set. One might think of whitelisted types/vars/functions as GC roots,
/// and the generated Rust code as including everything that gets marked.
whitelisted_types: RegexSet,
/// Whitelisted functions. See docs for `whitelisted_types` for more.
whitelisted_functions: RegexSet,
/// Whitelisted variables. See docs for `whitelisted_types` for more.
whitelisted_vars: RegexSet,
/// The enum patterns to mark an enum as bitfield.
bitfield_enums: RegexSet,
/// The enum patterns to mark an enum as a Rust enum.
rustified_enums: RegexSet,
/// The enum patterns to mark an enum as a module of constants.
constified_enum_modules: RegexSet,
/// Whether we should generate builtins or not.
builtins: bool,
/// The set of libraries we should link in the generated Rust code.
links: Vec<(String, LinkType)>,
/// True if we should dump the Clang AST for debugging purposes.
emit_ast: bool,
/// True if we should dump our internal IR for debugging purposes.
emit_ir: bool,
/// Output graphviz dot file.
emit_ir_graphviz: Option<String>,
/// True if we should emulate C++ namespaces with Rust modules in the
/// generated bindings.
enable_cxx_namespaces: bool,
/// True if we should avoid mangling names with namespaces.
disable_name_namespacing: bool,
/// True if we should generate layout tests for generated structures.
layout_tests: bool,
/// True if we should implement the Debug trait for C/C++ structures and types
/// that do not support automatically deriving Debug.
impl_debug: bool,
/// True if we should implement the PartialEq trait for C/C++ structures and types
/// that do not support autoamically deriving PartialEq.
impl_partialeq: bool,
/// True if we should derive Copy trait implementations for C/C++ structures
/// and types.
derive_copy: bool,
/// True if we should derive Debug trait implementations for C/C++ structures
/// and types.
derive_debug: bool,
/// True if we should derive Default trait implementations for C/C++ structures
/// and types.
derive_default: bool,
/// True if we should derive Hash trait implementations for C/C++ structures
/// and types.
derive_hash: bool,
/// True if we should derive PartialOrd trait implementations for C/C++ structures
/// and types.
derive_partialord: bool,
/// True if we should derive Ord trait implementations for C/C++ structures
/// and types.
derive_ord: bool,
/// True if we should derive PartialEq trait implementations for C/C++ structures
/// and types.
derive_partialeq: bool,
/// True if we should derive Eq trait implementations for C/C++ structures
/// and types.
derive_eq: bool,
/// True if we should avoid using libstd to use libcore instead.
use_core: bool,
/// An optional prefix for the "raw" types, like `c_int`, `c_void`...
ctypes_prefix: Option<String>,
/// Whether to time the bindgen phases.
time_phases: bool,
/// True if we should generate constant names that are **directly** under
/// namespaces.
namespaced_constants: bool,
/// True if we should use MSVC name mangling rules.
msvc_mangling: bool,
/// Whether we should convert float types to f32/f64 types.
convert_floats: bool,
/// The set of raw lines to prepend to the generated Rust code.
raw_lines: Vec<String>,
/// The set of arguments to pass straight through to Clang.
clang_args: Vec<String>,
/// The input header file.
input_header: Option<String>,
/// Unsaved files for input.
input_unsaved_files: Vec<clang::UnsavedFile>,
/// A user-provided visitor to allow customizing different kinds of
/// situations.
parse_callbacks: Option<Box<callbacks::ParseCallbacks>>,
/// Which kind of items should we generate? By default, we'll generate all
/// of them.
codegen_config: CodegenConfig,
/// Whether to treat inline namespaces conservatively.
///
/// See the builder method description for more details.
conservative_inline_namespaces: bool,
/// Whether to keep documentation comments in the generated output. See the
/// documentation for more details.
generate_comments: bool,
/// Whether to generate inline functions. Defaults to false.
generate_inline_functions: bool,
/// Whether to whitelist types recursively. Defaults to true.
whitelist_recursively: bool,
/// Instead of emitting 'use objc;' to files generated from Objective-C files,
/// generate '#[macro_use] extern crate objc;'
objc_extern_crate: bool,
/// Whether to use the clang-provided name mangling. This is true and
/// probably needed for C++ features.
///
/// However, some old libclang versions seem to return incorrect results in
/// some cases for non-mangled functions, see [1], so we allow disabling it.
///
/// [1]: https://github.com/rust-lang-nursery/rust-bindgen/issues/528
enable_mangling: bool,
/// Whether to prepend the enum name to bitfield or constant variants.
prepend_enum_name: bool,
/// Version of the Rust compiler to target
rust_target: RustTarget,
/// Features to enable, derived from `rust_target`
rust_features: RustFeatures,
/// Whether rustfmt should format the generated bindings.
rustfmt_bindings: bool,
/// The absolute path to the rustfmt configuration file, if None, the standard rustfmt
/// options are used.
rustfmt_configuration_file: Option<PathBuf>,
/// The set of types that we should not derive `PartialEq` for.
no_partialeq_types: RegexSet,
/// The set of types that we should not derive `Copy` for.
no_copy_types: RegexSet,
/// The set of types that we should not derive `Hash` for.
no_hash_types: RegexSet,
}
/// Manually assert that `BindgenOptions` is unwind safe.
///
/// TODO(emilio): This is sort of a lie (see the error message that results from
/// removing this), but since we don't share references across panic boundaries
/// it's ok.
impl ::std::panic::UnwindSafe for BindgenOptions {}
impl BindgenOptions {
    /// Compile every user-supplied `RegexSet` once, up front, so that later
    /// membership queries during parsing and codegen are cheap.
    fn build(&mut self) {
        self.whitelisted_vars.build();
        self.whitelisted_types.build();
        self.whitelisted_functions.build();
        self.blacklisted_types.build();
        self.opaque_types.build();
        self.bitfield_enums.build();
        self.constified_enum_modules.build();
        self.rustified_enums.build();
        self.no_partialeq_types.build();
        self.no_copy_types.build();
        self.no_hash_types.build();
    }
    /// Update rust target version
    pub fn set_rust_target(&mut self, rust_target: RustTarget) {
        self.rust_target = rust_target;
        // Keep rust_features synced with rust_target
        self.rust_features = rust_target.into();
    }
    /// Get features supported by target Rust version
    pub fn rust_features(&self) -> RustFeatures {
        self.rust_features
    }
}
impl Default for BindgenOptions {
    fn default() -> BindgenOptions {
        let rust_target = RustTarget::default();
        BindgenOptions {
            // `rust_features` is always derived from `rust_target`; the two
            // are kept in sync by `set_rust_target`.
            rust_target: rust_target,
            rust_features: rust_target.into(),
            // Item filtering sets start out empty (match nothing).
            blacklisted_types: Default::default(),
            opaque_types: Default::default(),
            whitelisted_types: Default::default(),
            whitelisted_functions: Default::default(),
            whitelisted_vars: Default::default(),
            // Enum representation choices, also empty by default.
            bitfield_enums: Default::default(),
            rustified_enums: Default::default(),
            constified_enum_modules: Default::default(),
            builtins: false,
            links: vec![],
            // Debug-output knobs are all off by default.
            emit_ast: false,
            emit_ir: false,
            emit_ir_graphviz: None,
            layout_tests: true,
            impl_debug: false,
            impl_partialeq: false,
            // Derives: only `Copy` and `Debug` are on by default; everything
            // else is opt-in.
            derive_copy: true,
            derive_debug: true,
            derive_default: false,
            derive_hash: false,
            derive_partialord: false,
            derive_ord: false,
            derive_partialeq: false,
            derive_eq: false,
            enable_cxx_namespaces: false,
            disable_name_namespacing: false,
            use_core: false,
            ctypes_prefix: None,
            namespaced_constants: true,
            msvc_mangling: false,
            convert_floats: true,
            raw_lines: vec![],
            clang_args: vec![],
            input_header: None,
            input_unsaved_files: vec![],
            parse_callbacks: None,
            // Generate every kind of item by default.
            codegen_config: CodegenConfig::all(),
            conservative_inline_namespaces: false,
            generate_comments: true,
            generate_inline_functions: false,
            whitelist_recursively: true,
            objc_extern_crate: false,
            enable_mangling: true,
            prepend_enum_name: true,
            time_phases: false,
            // Run rustfmt over the generated bindings when it is available.
            rustfmt_bindings: true,
            rustfmt_configuration_file: None,
            no_partialeq_types: Default::default(),
            no_copy_types: Default::default(),
            no_hash_types: Default::default(),
        }
    }
}
/// The linking type to use with a given library (shared, static, or
/// framework).
///
/// TODO: #104: This is ignored at the moment, but shouldn't be.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum LinkType {
    /// Use shared library linking. This is the default.
    Default,
    /// Use static linking.
    Static,
    /// The library is an OSX framework.
    Framework,
}
/// Load `libclang` exactly once for the lifetime of the process.
///
/// Idempotent: returns immediately when a library is already loaded.
/// Panics if no `libclang` can be found on the system.
fn ensure_libclang_is_loaded() {
    if clang_sys::is_loaded() {
        return;
    }
    // XXX (issue #350): Ensure that our dynamically loaded `libclang`
    // doesn't get dropped prematurely, nor is loaded multiple times
    // across different threads.
    lazy_static! {
        static ref LIBCLANG: Arc<clang_sys::SharedLibrary> = {
            clang_sys::load().expect("Unable to find libclang");
            clang_sys::get_library()
                .expect("We just loaded libclang and it had better still be \
                        here!")
        };
    }
    clang_sys::set_library(Some(LIBCLANG.clone()));
}
/// Generated Rust bindings.
#[derive(Debug)]
pub struct Bindings {
    /// The options these bindings were generated with; consulted again at
    /// write time (raw lines, rustfmt settings).
    options: BindgenOptions,
    /// The generated token stream for the whole bindings module.
    module: quote::Tokens,
}
impl Bindings {
    /// Generate bindings for the given options.
    ///
    /// Loads `libclang`, sanitizes the clang command line, validates the
    /// input header, parses the translation unit and finally runs codegen.
    pub(crate) fn generate(
        mut options: BindgenOptions,
    ) -> Result<Bindings, ()> {
        ensure_libclang_is_loaded();
        options.build();
        // Filter out include paths and similar stuff, so we don't incorrectly
        // promote them to `-isystem`.
        let clang_args_for_clang_sys = {
            let mut last_was_include_prefix = false;
            options.clang_args.iter().filter(|arg| {
                if last_was_include_prefix {
                    last_was_include_prefix = false;
                    return false;
                }
                let arg = &**arg;
                // https://clang.llvm.org/docs/ClangCommandLineReference.html
                // -isystem and -isystem-after are harmless.
                if arg == "-I" || arg == "--include-directory" {
                    last_was_include_prefix = true;
                    return false;
                }
                if arg.starts_with("-I") ||
                    arg.starts_with("--include-directory=")
                {
                    return false;
                }
                true
            }).cloned().collect::<Vec<_>>()
        };
        // TODO: Make this path fixup configurable?
        if let Some(clang) = clang_sys::support::Clang::find(
            None,
            &clang_args_for_clang_sys,
        )
        {
            // If --target is specified, assume caller knows what they're doing
            // and don't mess with include paths for them
            let has_target_arg = options
                .clang_args
                .iter()
                .any(|arg| arg.starts_with("--target"));
            if !has_target_arg {
                // TODO: distinguish C and C++ paths? C++'s should be enough, I
                // guess.
                if let Some(cpp_search_paths) = clang.cpp_search_paths {
                    for path in cpp_search_paths.into_iter() {
                        if let Ok(path) = path.into_os_string().into_string() {
                            options.clang_args.push("-isystem".to_owned());
                            options.clang_args.push(path);
                        }
                    }
                }
            }
        }
        // Whether the current user can read the given file's permission bits.
        #[cfg(unix)]
        fn can_read(perms: &std::fs::Permissions) -> bool {
            use std::os::unix::fs::PermissionsExt;
            perms.mode() & 0o444 > 0
        }
        #[cfg(not(unix))]
        fn can_read(_: &std::fs::Permissions) -> bool {
            true
        }
        if let Some(h) = options.input_header.as_ref() {
            if let Ok(md) = std::fs::metadata(h) {
                if !md.is_file() {
                    eprintln!("error: '{}' is a folder", h);
                    return Err(());
                }
                if !can_read(&md.permissions()) {
                    eprintln!("error: insufficient permissions to read '{}'", h);
                    return Err(());
                }
                options.clang_args.push(h.clone())
            } else {
                eprintln!("error: header '{}' does not exist.", h);
                return Err(());
            }
        }
        for f in options.input_unsaved_files.iter() {
            options.clang_args.push(f.name.to_str().unwrap().to_owned())
        }
        let time_phases = options.time_phases;
        let mut context = BindgenContext::new(options);
        {
            let _t = time::Timer::new("parse")
                .with_output(time_phases);
            try!(parse(&mut context));
        }
        let (items, options) = codegen::codegen(context);
        Ok(Bindings {
            options: options,
            module: quote! {
                #( #items )*
            }
        })
    }

    /// Convert these bindings into source text (with raw lines prepended).
    pub fn to_string(&self) -> String {
        let mut bytes = vec![];
        self.write(Box::new(&mut bytes) as Box<Write>)
            .expect("writing to a vec cannot fail");
        String::from_utf8(bytes)
            .expect("we should only write bindings that are valid utf-8")
    }

    /// Write these bindings as source text to a file.
    pub fn write_to_file<P: AsRef<Path>>(&self, path: P) -> io::Result<()> {
        let file = OpenOptions::new()
            .write(true)
            .truncate(true)
            .create(true)
            .open(path.as_ref())?;
        self.write(Box::new(file))?;
        Ok(())
    }

    /// Write these bindings as source text to the given `Write`able.
    pub fn write<'a>(&self, mut writer: Box<Write + 'a>) -> io::Result<()> {
        // NOTE: `write_all` is used throughout instead of `write`; plain
        // `write` may perform a *partial* write and silently drop the rest
        // of the buffer, producing truncated bindings.
        writer.write_all(
            "/* automatically generated by rust-bindgen */\n\n".as_bytes(),
        )?;
        for line in self.options.raw_lines.iter() {
            writer.write_all(line.as_bytes())?;
            writer.write_all("\n".as_bytes())?;
        }
        if !self.options.raw_lines.is_empty() {
            writer.write_all("\n".as_bytes())?;
        }
        let bindings = self.module.as_str().to_string();
        match self.rustfmt_generated_string(&bindings) {
            Ok(rustfmt_bindings) => {
                writer.write_all(rustfmt_bindings.as_bytes())?;
            },
            Err(err) => {
                // Formatting failed; fall back to the unformatted bindings.
                eprintln!("{:?}", err);
                writer.write_all(bindings.as_str().as_bytes())?;
            },
        }
        Ok(())
    }

    /// Checks if rustfmt_bindings is set and runs rustfmt on the string.
    ///
    /// Returns the source unchanged (borrowed) when formatting is disabled,
    /// and an owned, formatted copy otherwise.
    fn rustfmt_generated_string<'a>(
        &self,
        source: &'a str,
    ) -> io::Result<Cow<'a, str>> {
        let _t = time::Timer::new("rustfmt_generated_string")
            .with_output(self.options.time_phases);
        if !self.options.rustfmt_bindings {
            return Ok(Cow::Borrowed(source));
        }
        let rustfmt = which::which("rustfmt")
            .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_owned()))?;
        // Prefer using the `rustfmt-nightly` version of `rustfmt`, if
        // possible. It requires being run via `rustup run nightly ...`.
        let mut cmd = if let Ok(rustup) = which::which("rustup") {
            let mut cmd = Command::new(rustup);
            cmd.args(&["run", "nightly", "rustfmt", "--"]);
            cmd
        } else {
            Command::new(rustfmt)
        };
        cmd.args(&["--write-mode=display"])
            .stdin(Stdio::piped())
            .stdout(Stdio::piped());
        if let Some(path) = self.options
            .rustfmt_configuration_file
            .as_ref()
            .and_then(|f| f.to_str())
        {
            cmd.args(&["--config-path", path]);
        }
        let mut child = cmd.spawn()?;
        let mut child_stdin = child.stdin.take().unwrap();
        let mut child_stdout = child.stdout.take().unwrap();
        let source = source.to_owned();
        // Write to stdin in a new thread, so that we can read from stdout on
        // this thread. This keeps the child from blocking on writing to its
        // stdout which might block us from writing to its stdin.
        let stdin_handle = ::std::thread::spawn(move || {
            let _ = child_stdin.write_all(source.as_bytes());
            source
        });
        let mut output = vec![];
        io::copy(&mut child_stdout, &mut output)?;
        let status = child.wait()?;
        let source = stdin_handle.join()
            .expect("The thread writing to rustfmt's stdin doesn't do \
                    anything that could panic");
        match String::from_utf8(output) {
            Ok(bindings) => {
                match status.code() {
                    Some(0) => Ok(Cow::Owned(bindings)),
                    Some(2) => Err(io::Error::new(
                        io::ErrorKind::Other,
                        "Rustfmt parsing errors.".to_string(),
                    )),
                    Some(3) => {
                        // Exit code 3: some lines couldn't be formatted, but
                        // the output is still usable.
                        warn!("Rustfmt could not format some lines.");
                        Ok(Cow::Owned(bindings))
                    }
                    _ => Err(io::Error::new(
                        io::ErrorKind::Other,
                        "Internal rustfmt error".to_string(),
                    )),
                }
            },
            // Non-UTF8 output from rustfmt: return the original source.
            _ => Ok(Cow::Owned(source)),
        }
    }
}
/// Decides whether a cursor should be kept for parsing: compiler builtins
/// are filtered out unless the user explicitly asked for them.
fn filter_builtins(ctx: &BindgenContext, cursor: &clang::Cursor) -> bool {
    if ctx.options().builtins {
        // Builtins requested: keep everything.
        true
    } else {
        !cursor.is_builtin()
    }
}
/// Parse one `Item` from the Clang cursor.
///
/// Always returns `CXChildVisit_Continue` so the caller's AST visitation
/// proceeds with the next sibling; descending into children is handled
/// explicitly through `ParseError::Recurse`.
fn parse_one(
    ctx: &mut BindgenContext,
    cursor: clang::Cursor,
    parent: Option<ItemId>,
) -> clang_sys::CXChildVisitResult {
    // Declared before first use (it previously appeared after, which is
    // legal for block-scoped items but needlessly confusing).
    use clang_sys::CXChildVisit_Continue;
    if !filter_builtins(ctx, &cursor) {
        return CXChildVisit_Continue;
    }
    match Item::parse(cursor, parent, ctx) {
        Ok(..) => {}
        Err(ParseError::Continue) => {}
        Err(ParseError::Recurse) => {
            // This cursor can't be parsed as-is: try each of its children
            // instead, keeping the same parent.
            cursor.visit(|child| parse_one(ctx, child, parent));
        }
    }
    CXChildVisit_Continue
}
/// Parse the Clang AST into our `Item` internal representation.
///
/// Fails (returning `Err(())`) when clang reported any error-severity
/// diagnostic for the translation unit.
fn parse(context: &mut BindgenContext) -> Result<(), ()> {
    use clang_sys::*;
    // Surface every diagnostic to stderr, and remember whether any of them
    // was an actual error.
    let mut any_error = false;
    for d in context.translation_unit().diags().iter() {
        let msg = d.format();
        let is_err = d.severity() >= CXDiagnostic_Error;
        eprintln!("{}, err: {}", msg, is_err);
        any_error |= is_err;
    }
    if any_error {
        return Err(());
    }
    let cursor = context.translation_unit().cursor();
    if context.options().emit_ast {
        // Debug aid: dump the non-builtin parts of the clang AST.
        fn dump_if_not_builtin(cur: &clang::Cursor) -> CXChildVisitResult {
            if !cur.is_builtin() {
                clang::ast_dump(&cur, 0)
            } else {
                CXChildVisit_Continue
            }
        }
        cursor.visit(|cur| dump_if_not_builtin(&cur));
    }
    // Parse everything, starting at the root module.
    let root = context.root_module();
    context.with_module(root, |context| {
        cursor.visit(|cursor| parse_one(context, cursor, None))
    });
    // Parsing must leave the module stack balanced.
    assert!(
        context.current_module() == context.root_module(),
        "How did this happen?"
    );
    Ok(())
}
/// Extracted Clang version data
#[derive(Debug)]
pub struct ClangVersion {
    /// Major and minor semver numbers, if parsing was successful
    pub parsed: Option<(u32, u32)>,
    /// full version string, as reported by clang itself
    pub full: String,
}
/// Get the major and the minor semvar numbers of Clang's version
pub fn clang_version() -> ClangVersion {
if !clang_sys::is_loaded() {
// TODO(emilio): Return meaningful error (breaking).
clang_sys::load().expect("Unable to find libclang");
}
let raw_v: String = clang::extract_clang_version();
let split_v: Option<Vec<&str>> = raw_v.split_whitespace().nth(2).map(|v| {
v.split('.').collect()
});
match split_v {
Some(v) => {
if v.len() >= 2 {
let maybe_major = v[0].parse::<u32>();
let maybe_minor = v[1].parse::<u32>();
match (maybe_major, maybe_minor) {
(Ok(major), Ok(minor)) => {
return ClangVersion {
parsed: Some((major, minor)),
full: raw_v.clone(),
}
}
_ => {}
}
}
}
None => {}
};
ClangVersion {
parsed: None,
full: raw_v.clone(),
}
}
/// Test command_line_flag function.
#[test]
fn commandline_flag_unit_test_function() {
    // Case 1: a default builder should report the default flags.
    let bindings = ::builder();
    let command_line_flags = bindings.command_line_flags();
    let expected: Vec<String> = [
        "--no-derive-default",
        "--generate",
        "function,types,vars,methods,constructors,destructors",
    ].iter()
        .map(|&x| x.into())
        .collect();
    assert!(expected.iter().all(|x| command_line_flags.contains(x)));
    // Case 2: header plus whitelists must all round-trip into flags.
    let bindings = ::builder()
        .header("input_header")
        .whitelist_type("Distinct_Type")
        .whitelist_function("safe_function");
    let command_line_flags = bindings.command_line_flags();
    let expected: Vec<String> = [
        "input_header",
        "--no-derive-default",
        "--generate",
        "function,types,vars,methods,constructors,destructors",
        "--whitelist-type",
        "Distinct_Type",
        "--whitelist-function",
        "safe_function",
    ].iter()
        .map(|&x| x.into())
        .collect();
    println!("{:?}", command_line_flags);
    assert!(expected.iter().all(|x| command_line_flags.contains(x)));
}
|
#![feature(const_ptr_offset, lang_items)]
#![feature(asm_const)]
#![feature(asm_sym)]
#![feature(allocator_api)]
#![feature(panic_info_message)]
#![feature(naked_functions)]
#![feature(abi_x86_interrupt)]
#![feature(specialization)]
#![feature(linked_list_cursors)]
#![feature(const_fn_fn_ptr_basics)]
#![no_std]
extern crate alloc;
extern crate spin;
#[cfg(target_arch = "x86_64")]
extern crate x86;
#[macro_use]
extern crate bitflags;
extern crate goblin;
extern crate num_traits;
// These need to be visible to the linker, so we need to export them.
#[cfg(target_arch = "x86_64")]
pub use arch::processor::*;
use core::panic::PanicInfo;
pub use logging::*;
#[macro_use]
pub mod macros;
#[macro_use]
pub mod logging;
pub mod arch;
pub mod collections;
pub mod console;
pub mod consts;
pub mod errno;
pub mod fs;
pub mod mm;
pub mod scheduler;
pub mod synch;
pub mod syscall;
// The kernel's heap allocator, registered as Rust's global allocator.
#[global_allocator]
static ALLOCATOR: &'static mm::allocator::Allocator = &mm::allocator::Allocator;

/// This function is called on panic.
///
/// Prints the panicking task id, the source location and the panic message
/// (when available), then halts the CPU forever — a kernel panic is not
/// recoverable.
#[cfg(not(test))]
#[panic_handler]
pub fn panic(info: &PanicInfo) -> ! {
    let tid = scheduler::get_current_taskid();
    print!("[!!!PANIC from task {}!!!] ", tid);
    if let Some(location) = info.location() {
        print!("{}:{}: ", location.file(), location.line());
    }
    if let Some(message) = info.message() {
        print!("{}", message);
    }
    print!("\n");
    // Never return: spin, halting the CPU between (spurious) wake-ups.
    loop {
        halt();
    }
}
Minor changes to pass the format check.
#![feature(const_ptr_offset, lang_items)]
#![feature(asm_const)]
#![feature(asm_sym)]
#![feature(allocator_api)]
#![feature(panic_info_message)]
#![feature(naked_functions)]
#![feature(abi_x86_interrupt)]
#![feature(specialization)]
#![feature(linked_list_cursors)]
#![feature(const_fn_fn_ptr_basics)]
#![no_std]
extern crate alloc;
extern crate spin;
#[cfg(target_arch = "x86_64")]
extern crate x86;
#[macro_use]
extern crate bitflags;
extern crate goblin;
extern crate num_traits;
// These need to be visible to the linker, so we need to export them.
#[cfg(target_arch = "x86_64")]
pub use arch::processor::*;
use core::panic::PanicInfo;
pub use logging::*;
#[macro_use]
pub mod macros;
#[macro_use]
pub mod logging;
pub mod arch;
pub mod collections;
pub mod console;
pub mod consts;
pub mod errno;
pub mod fs;
pub mod mm;
pub mod scheduler;
pub mod synch;
pub mod syscall;
// The kernel's heap allocator, registered as Rust's global allocator.
#[global_allocator]
static ALLOCATOR: &'static mm::allocator::Allocator = &mm::allocator::Allocator;

/// This function is called on panic.
///
/// Prints the panicking task id, the source location and the panic message
/// (when available), then halts the CPU forever — a kernel panic is not
/// recoverable.
#[cfg(not(test))]
#[panic_handler]
pub fn panic(info: &PanicInfo) -> ! {
    let tid = scheduler::get_current_taskid();
    print!("[!!!PANIC from task {}!!!] ", tid);
    if let Some(location) = info.location() {
        print!("{}:{}: ", location.file(), location.line());
    }
    if let Some(message) = info.message() {
        print!("{}", message);
    }
    print!("\n");
    // Never return: spin, halting the CPU between (spurious) wake-ups.
    loop {
        halt();
    }
}
|
/// Cryptography module (placeholder for now).
pub mod crypto {
    /// Smoke-test hook: prints a marker so callers can verify the module
    /// is wired up.
    pub fn crypto_mod_test(){
        println!("Crypto Mod Test");
    }
}
pub mod sql {
extern crate rpassword;
extern crate rusqlite;
use std::path::Path;
use self::rusqlite::Connection;
pub fn sql_mod_test(){
println!("SQL Mod Test");
}
pub fn open_db(filepath: &String) -> Connection {
use std::convert;
let path = Path::new(filepath);
//TODO see if the db exists, to take user creds if not.
let mut db_exists = true;
if !path.exists() { //TODO see if the is_file check should be there.
db_exists = false;
}
let conn: rusqlite::Connection = Connection::open(&path).expect("Could not open a connection to the database.");
conn.execute("CREATE TABLE IF NOT EXISTS user (password TEXT);",&[]).expect("Unable to create table.");
if !db_exists {
use std::io;
println!("Enter a password for this database.");
let mut password = rpassword::prompt_password_stdout("Password: ").unwrap();
password = password.trim().to_string();
insert_user(&conn, &password);
}
return conn;
}
fn insert_user(conn: &Connection, pass:&String) {
conn.execute("INSERT into user(password) VALUES (?)",&[pass]).expect("Could not add password to the user table.");
}
}
Added warning about the password not being displayed.
/// Cryptography module (placeholder for now).
pub mod crypto {
    /// Smoke-test hook: prints a marker so callers can verify the module
    /// is wired up.
    pub fn crypto_mod_test(){
        println!("Crypto Mod Test");
    }
}
pub mod sql {
extern crate rpassword;
extern crate rusqlite;
use std::path::Path;
use self::rusqlite::Connection;
pub fn sql_mod_test(){
println!("SQL Mod Test");
}
pub fn open_db(filepath: &String) -> Connection {
use std::convert;
let path = Path::new(filepath);
//TODO see if the db exists, to take user creds if not.
let mut db_exists = true;
if !path.exists() { //TODO see if the is_file check should be there.
db_exists = false;
}
let conn: rusqlite::Connection = Connection::open(&path).expect("Could not open a connection to the database.");
conn.execute("CREATE TABLE IF NOT EXISTS user (password TEXT);",&[]).expect("Unable to create table.");
if !db_exists {
use std::io;
println!("Enter a password for this database.\nNote: You will not be able to see the password as you are entering it.");
let mut password = rpassword::prompt_password_stdout("Password: ").unwrap();
password = password.trim().to_string();
insert_user(&conn, &password);
}
return conn;
}
fn insert_user(conn: &Connection, pass:&String) {
conn.execute("INSERT into user(password) VALUES (?)",&[pass]).expect("Could not add password to the user table.");
}
}
|
extern crate sdl2;
use sdl2::pixels::Color;
use sdl2::event::Event::*;
use sdl2::keyboard::Keycode::*;
use std::thread;
pub fn ralf() {
println!("ralf");
let sdl_context = sdl2::init().unwrap();
let timer = sdl_context.timer().unwrap();
let mut event_pump = sdl_context.event_pump().unwrap();
let video = sdl_context.video().unwrap();
let window = video.window("Ruffel", 800, 600)
.position_centered().opengl()
.build().unwrap();
let mut renderer = window.renderer()
.accelerated()
.build().unwrap();
let mut done = false;
while !done {
renderer.set_draw_color(Color::RGB(0, 0, 0));
renderer.clear();
renderer.present();
thread::sleep_ms(1/60);
for event in event_pump.poll_iter() {
match event {
Quit { .. } => done = true,
KeyDown { keycode, .. } => match keycode {
Some(Escape) => done = true,
_ => {}
},
_ => {}
}
}
}
println!("finish");
}
Proper timekeeping.
extern crate sdl2;
use sdl2::pixels::Color;
use sdl2::event::Event::*;
use sdl2::keyboard::Keycode::*;
use std::thread;
use std::time::Duration;
pub fn ralf() {
println!("ralf");
let sdl_context = sdl2::init().unwrap();
let mut timer = sdl_context.timer().unwrap();
let mut event_pump = sdl_context.event_pump().unwrap();
let video = sdl_context.video().unwrap();
let window = video.window("Ruffel", 800, 600)
.position_centered().opengl()
.build().unwrap();
let mut renderer = window.renderer()
.accelerated()
.build().unwrap();
let mut done = false;
let mut delta = Duration::new(0, 0);
while !done {
let start_time = timer.ticks();
renderer.set_draw_color(Color::RGB(0, 0, 0));
renderer.clear();
renderer.present();
thread::sleep_ms(1000/60);
for event in event_pump.poll_iter() {
match event {
Quit { .. } => done = true,
KeyDown { keycode, .. } => match keycode {
Some(Escape) => done = true,
_ => {}
},
_ => {}
}
}
let end_time = timer.ticks();
delta = Duration::from_millis((end_time - start_time) as u64);
println!("{:?}", delta);
}
}
|
extern crate bytes;
extern crate futures;
extern crate kcp;
extern crate mio;
#[macro_use]
extern crate tokio_core;
extern crate tokio_io;
extern crate rand;
extern crate time;
use std::cell::RefCell;
use std::collections::HashMap;
use std::io::{self, Read, Write};
use std::net::SocketAddr;
use std::rc::Rc;
use std::time::{Duration, Instant};
use bytes::BytesMut;
use bytes::buf::FromBuf;
use futures::{Poll, Async, Future};
use futures::stream::Stream;
use kcp::prelude::*;
use mio::{Ready, Registration, PollOpt, Token, SetReadiness};
use mio::event::Evented;
use tokio_core::net::UdpSocket;
use tokio_core::reactor::{Handle, PollEvented, Timeout};
use tokio_io::{AsyncRead, AsyncWrite};
/// Current wall-clock time in milliseconds, truncated to `u32` (the
/// timestamp width KCP works with).
#[inline]
fn current() -> u32 {
    let now = time::get_time();
    let millis = now.sec * 1000 + now.nsec as i64 / 1_000_000;
    millis as u32
}
/// Adapter that lets KCP flush its raw segments straight onto a UDP socket.
pub struct KcpOutput {
    udp: Rc<UdpSocket>,
    /// Remote address every outgoing segment is sent to.
    peer: SocketAddr,
}

impl Write for KcpOutput {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.udp.send_to(buf, &self.peer)
    }
    /// UDP datagrams are not buffered here, so there is nothing to flush.
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}
/// Stream that drives the periodic `kcp.update()` calls off a shared
/// `Timeout`, re-arming the timeout for whenever KCP next wants scheduling.
struct KcpTimer {
    kcp: Rc<RefCell<Kcp<KcpOutput>>>,
    timer: Rc<RefCell<Timeout>>,
}

impl Stream for KcpTimer {
    type Item = ();
    type Error = io::Error;
    fn poll(&mut self) -> Poll<Option<()>, io::Error> {
        let mut timer = self.timer.borrow_mut();
        match timer.poll() {
            Ok(Async::Ready(())) => {
                // Timer fired: update KCP's state, then ask it when the
                // next update is due and re-arm the timeout accordingly.
                let mut kcp = self.kcp.borrow_mut();
                kcp.update(current())?;
                let dur = kcp.check(current());
                let next = Instant::now() + Duration::from_millis(dur as u64);
                timer.reset(next);
                Ok(Async::Ready(Some(())))
            }
            Ok(Async::NotReady) => Ok(Async::NotReady),
            Err(e) => Err(e),
        }
    }
}
/// The `Evented` object registered with the reactor. Readiness is driven
/// manually through the paired `SetReadiness` whenever KCP has new data.
struct KcpIo {
    kcp: Rc<RefCell<Kcp<KcpOutput>>>,
    registration: Registration,
    set_readiness: SetReadiness,
}

impl Read for KcpIo {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.kcp.borrow_mut().recv(buf)
    }
}
impl Write for KcpIo {
    /// Queue `buf` into KCP, then immediately flush the pending segments
    /// out through the underlying `KcpOutput`.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        let mut kcp = self.kcp.borrow_mut();
        let queued = kcp.send(&mut BytesMut::from_buf(buf))?;
        kcp.flush()?;
        Ok(queued)
    }
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}
/// Readiness is produced manually via `SetReadiness`; these methods simply
/// delegate to the inner `Registration`.
impl Evented for KcpIo {
    fn register(
        &self,
        poll: &mio::Poll,
        token: Token,
        interest: Ready,
        opts: PollOpt,
    ) -> io::Result<()> {
        self.registration.register(poll, token, interest, opts)
    }
    fn reregister(
        &self,
        poll: &mio::Poll,
        token: Token,
        interest: Ready,
        opts: PollOpt,
    ) -> io::Result<()> {
        self.registration.reregister(poll, token, interest, opts)
    }
    fn deregister(&self, poll: &mio::Poll) -> io::Result<()> {
        // UFCS to unambiguously call `Registration`'s `Evented` impl.
        <Registration as Evented>::deregister(&self.registration, poll)
    }
}
/// Background future that pumps incoming UDP datagrams into KCP for a
/// client-side stream.
struct KcpClient {
    socket: Rc<UdpSocket>,
    /// Scratch buffer for `recv_from`; 1500 bytes ≈ one Ethernet MTU.
    buf: Box<[u8]>,
    /// Datagram received on a previous poll that still has to be fed into
    /// KCP before the next receive.
    to_input: Option<(usize, SocketAddr)>,
    kcp: Rc<RefCell<Kcp<KcpOutput>>>,
    set_readiness: SetReadiness,
    timer: Rc<RefCell<Timeout>>,
}

impl Future for KcpClient {
    type Item = ();
    type Error = io::Error;
    fn poll(&mut self) -> Poll<(), io::Error> {
        loop {
            if let Some((size, _peer)) = self.to_input.take() {
                let current = current();
                let mut kcp = self.kcp.borrow_mut();
                kcp.input(&mut BytesMut::from_buf(&self.buf[..size]))?;
                kcp.update(current)?;
                let dur = kcp.check(current);
                // Re-arm the shared timer for KCP's next scheduled update.
                self.timer.borrow_mut().reset(
                    Instant::now() +
                        Duration::from_millis(dur as u64),
                );
                // Wake any reader parked on the associated `KcpIo`.
                self.set_readiness.set_readiness(mio::Ready::readable())?;
            }
            // `try_nb!` propagates WouldBlock as `NotReady` out of `poll`.
            self.to_input = Some(try_nb!(self.socket.recv_from(&mut self.buf)));
        }
    }
}
/// Future returned by `KcpStream::connect`; resolves immediately to the
/// already-constructed stream.
pub struct KcpStreamNew {
    inner: Option<KcpStream>,
}

impl Future for KcpStreamNew {
    type Item = KcpStream;
    type Error = io::Error;
    fn poll(&mut self) -> Poll<KcpStream, io::Error> {
        // NOTE: like most one-shot futures, this panics if polled again
        // after it has completed.
        Ok(Async::Ready(self.inner.take().unwrap()))
    }
}
/// A KCP-over-UDP "stream" usable through tokio's I/O traits.
pub struct KcpStream {
    io: PollEvented<KcpIo>,
}

impl KcpStream {
    /// Connect to `addr`: bind an ephemeral local UDP socket, set up the
    /// shared KCP state, and spawn the timer and receiver tasks on `handle`.
    pub fn connect(addr: &SocketAddr, handle: &Handle) -> KcpStreamNew {
        let local: SocketAddr = "0.0.0.0:0".parse().unwrap();
        let udp = Rc::new(UdpSocket::bind(&local, handle).unwrap());
        // A random conversation id identifies this connection to the peer.
        let kcp = Kcp::new(
            rand::random::<u32>(),
            KcpOutput {
                udp: udp.clone(),
                peer: *addr,
            },
        );
        let kcp = Rc::new(RefCell::new(kcp));
        let (registration, set_readiness) = Registration::new2();
        // Fires immediately once; re-armed by KcpTimer/KcpClient afterwards.
        let timer = Rc::new(RefCell::new(
            Timeout::new_at(Instant::now(), handle).unwrap(),
        ));
        let io = KcpIo {
            kcp: kcp.clone(),
            registration: registration,
            set_readiness: set_readiness.clone(),
        };
        // Background task #1: drive periodic kcp.update() calls.
        let interval = KcpTimer {
            kcp: kcp.clone(),
            timer: timer.clone(),
        };
        handle.spawn(interval.for_each(|_| Ok(())).then(|_| Ok(())));
        let io = PollEvented::new(io, handle).unwrap();
        let inner = KcpStream { io: io };
        // Background task #2: pump incoming datagrams into KCP.
        handle.spawn(
            KcpClient {
                socket: udp.clone(),
                buf: Box::new([0; 1500]),
                to_input: None,
                kcp: kcp.clone(),
                set_readiness: set_readiness.clone(),
                timer: timer.clone(),
            }.then(|_| Ok(())),
        );
        KcpStreamNew { inner: Some(inner) }
    }
}
impl Read for KcpStream {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.io.read(buf)
    }
}

impl Write for KcpStream {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        // Mark the registration writable up front so the reactor doesn't
        // park the writer; flow control happens inside KCP itself.
        self.io.get_ref().set_readiness.set_readiness(
            mio::Ready::writable(),
        )?;
        self.io.write(buf)
    }
    fn flush(&mut self) -> io::Result<()> {
        self.io.flush()
    }
}

impl AsyncRead for KcpStream {}

impl AsyncWrite for KcpStream {
    fn shutdown(&mut self) -> Poll<(), io::Error> {
        // No shutdown handshake is implemented here; report success.
        Ok(().into())
    }
}
/// Per-peer server-side state: the shared KCP instance plus the readiness
/// handle used to wake that peer's stream.
struct KcpSession {
    kcp: Rc<RefCell<Kcp<KcpOutput>>>,
    set_readiness: SetReadiness,
}

/// Accepts KCP connections arriving on a single shared UDP socket.
pub struct KcpListener {
    udp: Rc<UdpSocket>,
    /// Live sessions, keyed by peer address.
    sessions: HashMap<SocketAddr, KcpSession>,
    handle: Handle,
}

/// Stream of incoming `(stream, peer address)` pairs from a listener.
pub struct Incoming {
    inner: KcpListener,
}

impl Stream for Incoming {
    type Item = (KcpStream, SocketAddr);
    type Error = io::Error;
    fn poll(&mut self) -> Poll<Option<Self::Item>, io::Error> {
        // `try_nb!` converts WouldBlock from `accept` into `NotReady`.
        Ok(Async::Ready(Some(try_nb!(self.inner.accept()))))
    }
}
impl KcpListener {
    /// Bind a UDP socket on `addr` and wrap it in a listener.
    pub fn bind(addr: &SocketAddr, handle: &Handle) -> io::Result<KcpListener> {
        UdpSocket::bind(addr, handle).map(|udp| {
            KcpListener {
                udp: Rc::new(udp),
                sessions: HashMap::new(),
                handle: handle.clone(),
            }
        })
    }
    /// Receive datagrams until one arrives from a *new* peer, and return a
    /// stream for that peer.
    ///
    /// Datagrams from already-known peers are routed into their existing
    /// session and the loop continues.
    pub fn accept(&mut self) -> io::Result<(KcpStream, SocketAddr)> {
        let mut buf = [0; 1500];
        loop {
            let (size, addr) = self.udp.recv_from(&mut buf)?;
            if let Some(session) = self.sessions.get(&addr) {
                // Existing peer: feed the datagram to its KCP instance and
                // wake its reader.
                session.kcp.borrow_mut().input(&mut BytesMut::from_buf(
                    &buf[..size],
                ))?;
                session.set_readiness.set_readiness(mio::Ready::readable())?;
                continue;
            }
            // New peer: reuse the conversation id from its first datagram.
            let kcp = Kcp::new(
                get_conv(&buf),
                KcpOutput {
                    udp: self.udp.clone(),
                    peer: addr,
                },
            );
            let kcp = Rc::new(RefCell::new(kcp));
            let (registration, set_readiness) = Registration::new2();
            // Fires immediately once; re-armed by the KcpTimer afterwards.
            let timer = Rc::new(RefCell::new(Timeout::new_at(Instant::now(), &self.handle)?));
            let io = KcpIo {
                kcp: kcp.clone(),
                registration: registration,
                set_readiness: set_readiness.clone(),
            };
            // Drive this session's periodic kcp.update() calls.
            let interval = KcpTimer {
                kcp: kcp.clone(),
                timer: timer.clone(),
            };
            self.handle.spawn(
                interval.for_each(|_| Ok(())).then(|_| Ok(())),
            );
            let io = PollEvented::new(io, &self.handle).unwrap();
            let stream = KcpStream { io: io };
            // Feed the very first datagram in and mark the stream readable.
            stream.io.get_ref().kcp.borrow_mut().input(
                &mut BytesMut::from_buf(&buf[..size]),
            )?;
            stream.io.get_ref().set_readiness.set_readiness(
                mio::Ready::readable(),
            )?;
            let session = KcpSession {
                kcp: kcp.clone(),
                set_readiness: set_readiness.clone(),
            };
            self.sessions.insert(addr, session);
            return Ok((stream, addr));
        }
    }
    /// Consume the listener, turning it into a stream of connections.
    pub fn incoming(self) -> Incoming {
        Incoming { inner: self }
    }
}
Use the client stream, instead of the timer task, to feed input data.
extern crate bytes;
extern crate futures;
extern crate kcp;
extern crate mio;
#[macro_use]
extern crate tokio_core;
extern crate tokio_io;
extern crate rand;
extern crate time;
use std::cell::RefCell;
use std::collections::HashMap;
use std::io::{self, Read, Write};
use std::net::SocketAddr;
use std::rc::Rc;
use std::time::{Duration, Instant};
use bytes::BytesMut;
use bytes::buf::FromBuf;
use futures::{Poll, Async, Future};
use futures::stream::Stream;
use kcp::prelude::*;
use mio::{Ready, Registration, PollOpt, Token, SetReadiness};
use mio::event::Evented;
use tokio_core::net::UdpSocket;
use tokio_core::reactor::{Handle, PollEvented, Timeout};
use tokio_io::{AsyncRead, AsyncWrite};
/// Current wall-clock time in milliseconds, truncated to `u32` (the
/// timestamp width KCP works with).
#[inline]
fn current() -> u32 {
    let now = time::get_time();
    let millis = now.sec * 1000 + now.nsec as i64 / 1_000_000;
    millis as u32
}
/// Adapter that lets KCP flush its raw segments straight onto a UDP socket.
pub struct KcpOutput {
    udp: Rc<UdpSocket>,
    /// Remote address every outgoing segment is sent to.
    peer: SocketAddr,
}

impl Write for KcpOutput {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.udp.send_to(buf, &self.peer)
    }
    /// UDP datagrams are not buffered here, so there is nothing to flush.
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}
/// Stream that drives the periodic `kcp.update()` calls off a shared
/// `Timeout`, re-arming the timeout for whenever KCP next wants scheduling.
struct KcpTimer {
    kcp: Rc<RefCell<Kcp<KcpOutput>>>,
    timer: Rc<RefCell<Timeout>>,
}

impl Stream for KcpTimer {
    type Item = ();
    type Error = io::Error;
    fn poll(&mut self) -> Poll<Option<()>, io::Error> {
        let mut timer = self.timer.borrow_mut();
        match timer.poll() {
            Ok(Async::Ready(())) => {
                // Timer fired: update KCP's state, then ask it when the
                // next update is due and re-arm the timeout accordingly.
                let mut kcp = self.kcp.borrow_mut();
                kcp.update(current())?;
                let dur = kcp.check(current());
                let next = Instant::now() + Duration::from_millis(dur as u64);
                timer.reset(next);
                Ok(Async::Ready(Some(())))
            }
            Ok(Async::NotReady) => Ok(Async::NotReady),
            Err(e) => Err(e),
        }
    }
}
/// The `Evented` object registered with the reactor. Readiness is driven
/// manually through the paired `SetReadiness` whenever KCP has new data.
struct KcpIo {
    kcp: Rc<RefCell<Kcp<KcpOutput>>>,
    registration: Registration,
    set_readiness: SetReadiness,
}

impl Read for KcpIo {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.kcp.borrow_mut().recv(buf)
    }
}
impl Write for KcpIo {
    /// Queue `buf` into KCP, then immediately flush the pending segments
    /// out through the underlying `KcpOutput`.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        let mut kcp = self.kcp.borrow_mut();
        let queued = kcp.send(&mut BytesMut::from_buf(buf))?;
        kcp.flush()?;
        Ok(queued)
    }
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}
/// Readiness is produced manually via `SetReadiness`; these methods simply
/// delegate to the inner `Registration`.
impl Evented for KcpIo {
    fn register(
        &self,
        poll: &mio::Poll,
        token: Token,
        interest: Ready,
        opts: PollOpt,
    ) -> io::Result<()> {
        self.registration.register(poll, token, interest, opts)
    }
    fn reregister(
        &self,
        poll: &mio::Poll,
        token: Token,
        interest: Ready,
        opts: PollOpt,
    ) -> io::Result<()> {
        self.registration.reregister(poll, token, interest, opts)
    }
    fn deregister(&self, poll: &mio::Poll) -> io::Result<()> {
        // UFCS to unambiguously call `Registration`'s `Evented` impl.
        <Registration as Evented>::deregister(&self.registration, poll)
    }
}
// Client-side KCP stream: owns the UDP socket and pumps any pending
// datagram into the KCP state machine on each read.
pub struct KcpClientStream {
udp: Rc<UdpSocket>,
io: PollEvented<KcpIo>,
}
impl Read for KcpClientStream {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
if let Ok((n, _)) = self.udp.recv_from(buf) {
self.io.get_ref().kcp.borrow_mut().input(
&mut BytesMut::from_buf(
&buf[..n],
),
)?;
self.io.get_ref().set_readiness.set_readiness(
mio::Ready::readable(),
)?;
}
self.io.read(buf)
}
}
// read() already follows the nonblocking contract (WouldBlock when empty).
impl AsyncRead for KcpClientStream {}
impl AsyncWrite for KcpClientStream {
// KCP has no shutdown handshake at this layer; report immediate success.
fn shutdown(&mut self) -> Poll<(), io::Error> {
Ok(().into())
}
}
impl Write for KcpClientStream {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
// Mark the handle writable so the reactor keeps polling this stream.
self.io.get_ref().set_readiness.set_readiness(
mio::Ready::writable(),
)?;
self.io.write(buf)
}
fn flush(&mut self) -> io::Result<()> {
self.io.flush()
}
}
// Future returned by KcpStream::connect(); resolves on its first poll.
pub struct KcpStreamNew {
// Taken by poll(); None once the future has completed.
inner: Option<KcpClientStream>,
}
impl Future for KcpStreamNew {
    type Item = KcpClientStream;
    type Error = io::Error;

    /// Resolves immediately with the already-constructed client stream.
    ///
    /// # Panics
    /// Panics if polled again after it has already yielded the stream
    /// (the futures 0.1 contract forbids polling a completed future).
    fn poll(&mut self) -> Poll<KcpClientStream, io::Error> {
        let stream = self.inner
            .take()
            .expect("KcpStreamNew polled after completion");
        Ok(Async::Ready(stream))
    }
}
// Server-side KCP stream handed out by KcpListener::accept(); incoming
// datagrams are fed into KCP by the listener, not by this stream.
pub struct KcpStream {
io: PollEvented<KcpIo>,
}
impl KcpStream {
/// Opens a KCP connection to `addr` over a freshly bound UDP socket and
/// returns a future resolving to the client stream.
///
/// NOTE(review): socket binding and timer creation unwrap() on failure,
/// so callers cannot observe those errors.
pub fn connect(addr: &SocketAddr, handle: &Handle) -> KcpStreamNew {
let local: SocketAddr = "0.0.0.0:0".parse().unwrap();
let udp = Rc::new(UdpSocket::bind(&local, handle).unwrap());
// Random conversation id identifies this KCP session to the peer.
let kcp = Kcp::new(
rand::random::<u32>(),
KcpOutput {
udp: udp.clone(),
peer: *addr,
},
);
let kcp = Rc::new(RefCell::new(kcp));
let (registration, set_readiness) = Registration::new2();
// Timer fires "now" so the first kcp.update() happens immediately.
let timer = Rc::new(RefCell::new(
Timeout::new_at(Instant::now(), handle).unwrap(),
));
let io = KcpIo {
kcp: kcp.clone(),
registration: registration,
set_readiness: set_readiness.clone(),
};
let interval = KcpTimer {
kcp: kcp.clone(),
timer: timer.clone(),
};
// Drive the update timer in the background; its errors are discarded.
handle.spawn(interval.for_each(|_| Ok(())).then(|_| Ok(())));
let io = PollEvented::new(io, handle).unwrap();
let inner = KcpClientStream { udp: udp, io: io };
KcpStreamNew { inner: Some(inner) }
}
}
impl Read for KcpStream {
// Reads KCP-reassembled bytes; the listener feeds datagrams in.
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
self.io.read(buf)
}
}
impl Write for KcpStream {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
// Mark the handle writable so the reactor keeps polling this stream.
self.io.get_ref().set_readiness.set_readiness(
mio::Ready::writable(),
)?;
self.io.write(buf)
}
fn flush(&mut self) -> io::Result<()> {
self.io.flush()
}
}
// read() already follows the nonblocking contract (WouldBlock when empty).
impl AsyncRead for KcpStream {}
impl AsyncWrite for KcpStream {
// KCP has no shutdown handshake at this layer; report immediate success.
fn shutdown(&mut self) -> Poll<(), io::Error> {
Ok(().into())
}
}
// Per-peer state kept by the listener so later datagrams from the same
// address are routed into the right KCP instance.
struct KcpSession {
kcp: Rc<RefCell<Kcp<KcpOutput>>>,
set_readiness: SetReadiness,
}
// KCP server: demultiplexes datagrams from one UDP socket into
// per-peer KCP sessions.
pub struct KcpListener {
udp: Rc<UdpSocket>,
// NOTE(review): sessions are never removed, so entries accumulate for
// the lifetime of the listener.
sessions: HashMap<SocketAddr, KcpSession>,
handle: Handle,
}
// Infinite stream of accepted (stream, peer-address) pairs.
pub struct Incoming {
inner: KcpListener,
}
impl Stream for Incoming {
type Item = (KcpStream, SocketAddr);
type Error = io::Error;
// try_nb! maps a WouldBlock from accept() to Async::NotReady.
fn poll(&mut self) -> Poll<Option<Self::Item>, io::Error> {
Ok(Async::Ready(Some(try_nb!(self.inner.accept()))))
}
}
impl KcpListener {
/// Binds a UDP socket on `addr` and wraps it in a listener with an
/// empty session table.
pub fn bind(addr: &SocketAddr, handle: &Handle) -> io::Result<KcpListener> {
UdpSocket::bind(addr, handle).map(|udp| {
KcpListener {
udp: Rc::new(udp),
sessions: HashMap::new(),
handle: handle.clone(),
}
})
}
/// Receives datagrams until one arrives from a previously unseen peer,
/// then sets up a KCP session for it and returns the new stream.
/// Datagrams from known peers are fed into their existing sessions.
pub fn accept(&mut self) -> io::Result<(KcpStream, SocketAddr)> {
// 1500 bytes covers a typical Ethernet MTU.
let mut buf = [0; 1500];
loop {
let (size, addr) = self.udp.recv_from(&mut buf)?;
// Known peer: route the datagram into its session and keep looping.
if let Some(session) = self.sessions.get(&addr) {
session.kcp.borrow_mut().input(&mut BytesMut::from_buf(
&buf[..size],
))?;
session.set_readiness.set_readiness(mio::Ready::readable())?;
continue;
}
// New peer: adopt the conversation id from the incoming packet.
let kcp = Kcp::new(
get_conv(&buf),
KcpOutput {
udp: self.udp.clone(),
peer: addr,
},
);
let kcp = Rc::new(RefCell::new(kcp));
let (registration, set_readiness) = Registration::new2();
// Timer fires "now" so the first kcp.update() happens immediately.
let timer = Rc::new(RefCell::new(Timeout::new_at(Instant::now(), &self.handle)?));
let io = KcpIo {
kcp: kcp.clone(),
registration: registration,
set_readiness: set_readiness.clone(),
};
let interval = KcpTimer {
kcp: kcp.clone(),
timer: timer.clone(),
};
// Drive the update timer in the background; its errors are discarded.
self.handle.spawn(
interval.for_each(|_| Ok(())).then(|_| Ok(())),
);
let io = PollEvented::new(io, &self.handle).unwrap();
let stream = KcpStream { io: io };
// Feed the first datagram in and mark the stream readable.
stream.io.get_ref().kcp.borrow_mut().input(
&mut BytesMut::from_buf(&buf[..size]),
)?;
stream.io.get_ref().set_readiness.set_readiness(
mio::Ready::readable(),
)?;
let session = KcpSession {
kcp: kcp.clone(),
set_readiness: set_readiness.clone(),
};
self.sessions.insert(addr, session);
return Ok((stream, addr));
}
}
/// Consumes the listener, turning it into a stream of connections.
pub fn incoming(self) -> Incoming {
Incoming { inner: self }
}
}
|
//! A simple cross-platform library loader.
//!
//! ([crates.io](https://crates.io/crates/sharedlib)) ([github](https://github.com/Tyleo/sharedlib))
//!
//! Based on [libloading](https://crates.io/crates/libloading) by Simonas Kazlauskas.
//!
//! # Loading a library
//! To load a library you can use any of the [Lib](struct.Lib.html), [LibTracked](struct.LibTracked.html), or [LibUnsafe](struct.LibUnsafe.html) `structs`. Each of these `struct`s provides different guarantees. For more information about the guarantees they provide, see the [chosing your guarantees](index.html#choosing-your-guarantees) section, below. We use [Lib](struct.Lib.html) for the examples below.
//!
//! ### Calling a function in another library
//! ```no_run
//! unsafe {
//! let path_to_lib = "examplelib.dll";
//! let lib = try!(Lib::new(path_to_lib));
//! let hello_world_symbol: Func<extern "C" fn()> = try!(lib.find_func("hello_world"));
//! let hello_world = hello_world_symbol.get();
//! hello_world();
//! }
//! ```
//!
//! ### Accessing data in another library
//! ```no_run
//! unsafe {
//! let path_to_lib = "examplelib.dll";
//! let lib = try!(Lib::new(path_to_lib));
//! let my_usize_symbol: Data<usize> = try!(lib.find_data("my_usize"));
//! let my_usize = my_usize_symbol.get();
//! assert_eq!(*my_usize, 0);
//! }
//! ```
//!
//! ### Choosing your guarantees
//! A common problem when loading a shared library at runtime is that a symbol may be accessed after its library has been unloaded. [sharedlib](index.html) attempts to prevent this by allowing the lifetime of the library to be tracked. Each of the different libraries, [Lib](struct.Lib.html), [LibTracked](struct.LibTracked.html), or [LibUnsafe](struct.LibUnsafe.html), provides a different tracking mechanism. Below is a small overview. For more information, see the struct level documentation.
//!
//! * [LibUnsafe](struct.LibUnsafe.html) does not provide any tracking at all. This requires no overhead but responsibility falls on the client to be sure that the library is still alive when its symbols are used.
//!
//! * [Lib](struct.Lib.html) attaches its own lifetime to each symbol it returns. This requires no overhead but it can be difficult to store the returned symbol in a `struct` because the `struct` must have a trackable lifetime which outlives the [Lib](struct.Lib.html). In other words, a struct containing a symbol must parameterize around some lifetime `a`, where `a` is less than or equal to the lifetime of the library.
//!
//! * [LibTracked](struct.LibTracked.html) returns symbols with ref-counts to the library. This requires overhead but it allows the returned symbol to be stored easily. Additionally, this `struct` is generic and can be used with `Rc`, `Arc`, or a user provided ref-count type.
//!
//! # Pitfalls
//! While [sharedlib](index.html) attempts to prevent undefined behavior, loading shared libraries is inherently unsafe. Below are some tips which you may find helpful so that your code is not exposed to undefined behavior.
//!
//! ### Avoid copying or moving data returned from `get()`
//! The [get](trait.Symbol.html#method.get) method on [Symbol](trait.Symbol.html) returns a transmuted pointer to something in a loaded library. While [sharedlib](index.html) tries to make sure that this pointer cannot outlive the library it is from, full protection is impossible. In particular: if a loaded `struct` contains pointers to things in the loaded library, and the loaded `struct` implements `Clone`, clients can clone the `struct` and make it to live longer than the library it is from. If this happens the pointers in the `struct` dangle. The example below demonstrate:
//!
//! ```no_run
//! unsafe {
//! let some_func = {
//! let lib = try!(Lib::new("examplelib.dll"));
//! let some_func_symbol: Func<extern "C" fn()> = try!(lib.find_func(b"some_func"));
//! // All func pointers implement `Copy` so we can duplicate one.
//! some_func_symbol.get()
//! // lib goes out of scope here.
//! };
//! // Undefined behavior
//! some_func();
//! }
//! ```
//! ### Use the correct method when getting functions or data
//! Each library provides two different ways to get symbols from shared libraries. One way is `find_func`, and the other is `find_data`. Two functions are provded because `find_data` needs to return a reference to a `T` rather than a `T` itself, while `find_func` just needs to return a `T` itself. Returning the wrong thing can cause some complications. For instance: suppose we only have the `find_data` method, and we want to get a function pointer with the signature `fn()`. We are inclined to call `lib.find_data::<fn()>(b"some_func")`. This searches the memory of the loaded binary and finds the address of the first line of the function `some_func`. Next, the *contents* of the first line of `some_func` are treated as a function pointer rather than the *address* of the first line of `some_func`. When the first line of `some_func` is returned it is incorrectly cast into a function pointer. Calling it produces undefined behavior. The example below demonstrates:
//!
//! ```no_run
//! unsafe {
//! let lib = try!(Lib::new("examplelib.dll"));
//! let some_func_symbol: Data<extern "C" fn()> = try!(lib.find_data(b"some_func"));
//! // some_func actually points to a function but rust thinks it points to a function pointer.
//! let some_func = some_func_symbol.get();
//! // Undefined behavior
//! some_func();
//! }
//! ```
//!
//! The correct way to do this with `find_data` is as follows:
//!
//! ```no_run
//! unsafe {
//! let lib = try!(Lib::new("examplelib.dll"));
//! // Get a pointer to the block of memory at "some_func", this is the function itself.
//! let some_func_symbol: Data<u8> = try!(lib.find_data(b"some_func"));
//! // The type of some_func is &u8, a reference to the first byte of `some_func`. We can convert this into a function pointer.
//! let some_func = some_func_symbol.get();
//! let some_func_ptr: extern "C" fn() = std::mem::transmute(some_func);
//! // This works now.
//! some_func_ptr();
//! }
//! ```
//!
//! For convenience, the second example is provided as the `find_func` method, which does this error-prone conversion behind the scenes.
//!
//! # Comparison with other crates for loading shared libraries
//! sharedlib was created out of frustration with the existing crates for loading shared libraries. Below is a list of some of these crates with some information about how sharedlib improves upon them.
//!
//! * [dylib](https://crates.io/crates/dylib) provides an extremely simple interface for loading shared libraries. For awhile, this was the standard for loading shared libraries at runtime. Unfortunately, development on dylib has been mostly abandoned and it is no longer supported on the latest versions of the rust compiler.
//!
//! * [libloading](https://crates.io/crates/libloading) provides some additional safety guarantees on top of [dylib](https://crates.io/crates/dylib). [sharedlib](index.html) even started as a fork of [libloading](https://crates.io/crates/libloading). Unfortunately the interface [libloading](https://crates.io/crates/libloading) provides is extremely inflexible, requiring clients to transmute symbols so they can be used in `struct`s. Additionally, loading data does not work with this library which is a non-starter for many projects.
//!
//! # Frequently asked questions
//!
//! ### What is a shared library?
//! A shared library is a set of functions and variables which can be loaded after a program has been compiled. By loading a library after compilation, the library can be recompiled or changed without recompiling the main program. Shared libraries can even be loaded at runtime. Common shared library filetypes are *.dll* for windows, *.so* for unix, and *.dylib* for osx. For more information about what a shared library is, see [wikipedia](https://en.wikipedia.org/wiki/Library_(computing)#Shared_libraries).
//!
//! ### Doesn't rust already provide linking against shared libraries?
//! While rust provides linking against shared libraries, it does not provide the ability to load them at runtime. If you only want to use shared libraries that you know about before runtime, you may not find this crate very useful. On the other hand, if you wish to load something at runtime, like a plugin, you are in the right place.
#[macro_use]
extern crate define_error;
#[macro_use]
extern crate lazy_static;
#[cfg(windows)]
extern crate kernel32;
#[cfg(windows)]
extern crate winapi;
pub mod error;
mod os;
mod lib_impl;
mod string;
mod symbol;
#[cfg(test)]
mod test;
mod util;
pub use error::SharedlibError;
pub use error::SharedlibResult;
pub use lib_impl::Lib;
pub use lib_impl::LibArc;
pub use lib_impl::LibRc;
pub use lib_impl::LibTracked;
pub use lib_impl::LibUnsafe;
pub use symbol::Data;
pub use symbol::DataArc;
pub use symbol::DataRc;
pub use symbol::DataTracked;
pub use symbol::DataUnsafe;
pub use symbol::Func;
pub use symbol::FuncArc;
pub use symbol::FuncRc;
pub use symbol::FuncTracked;
pub use symbol::FuncUnsafe;
pub use symbol::Symbol;
// Temp disable doc tests.
//! A simple cross-platform library loader.
//!
//! ([crates.io](https://crates.io/crates/sharedlib)) ([github](https://github.com/Tyleo/sharedlib))
//!
//! Based on [libloading](https://crates.io/crates/libloading) by Simonas Kazlauskas.
//!
//! # Loading a library
//! To load a library you can use any of the [Lib](struct.Lib.html), [LibTracked](struct.LibTracked.html), or [LibUnsafe](struct.LibUnsafe.html) `structs`. Each of these `struct`s provides different guarantees. For more information about the guarantees they provide, see the [chosing your guarantees](index.html#choosing-your-guarantees) section, below. We use [Lib](struct.Lib.html) for the examples below.
//!
//! ### Calling a function in another library
//! ```norun
//! unsafe {
//! let path_to_lib = "examplelib.dll";
//! let lib = try!(Lib::new(path_to_lib));
//! let hello_world_symbol: Func<extern "C" fn()> = try!(lib.find_func("hello_world"));
//! let hello_world = hello_world_symbol.get();
//! hello_world();
//! }
//! ```
//!
//! ### Accessing data in another library
//! ```norun
//! unsafe {
//! let path_to_lib = "examplelib.dll";
//! let lib = try!(Lib::new(path_to_lib));
//! let my_usize_symbol: Data<usize> = try!(lib.find_data("my_usize"));
//! let my_usize = my_usize_symbol.get();
//! assert_eq!(*my_usize, 0);
//! }
//! ```
//!
//! ### Choosing your guarantees
//! A common problem when loading a shared library at runtime is that a symbol may be accessed after its library has been unloaded. [sharedlib](index.html) attempts to prevent this by allowing the lifetime of the library to be tracked. Each of the different libraries, [Lib](struct.Lib.html), [LibTracked](struct.LibTracked.html), or [LibUnsafe](struct.LibUnsafe.html), provides a different tracking mechanism. Below is a small overview. For more information, see the struct level documentation.
//!
//! * [LibUnsafe](struct.LibUnsafe.html) does not provide any tracking at all. This requires no overhead but responsibility falls on the client to be sure that the library is still alive when its symbols are used.
//!
//! * [Lib](struct.Lib.html) attaches its own lifetime to each symbol it returns. This requires no overhead but it can be difficult to store the returned symbol in a `struct` because the `struct` must have a trackable lifetime which outlives the [Lib](struct.Lib.html). In other words, a struct containing a symbol must parameterize around some lifetime `a`, where `a` is less than or equal to the lifetime of the library.
//!
//! * [LibTracked](struct.LibTracked.html) returns symbols with ref-counts to the library. This requires overhead but it allows the returned symbol to be stored easily. Additionally, this `struct` is generic and can be used with `Rc`, `Arc`, or a user provided ref-count type.
//!
//! # Pitfalls
//! While [sharedlib](index.html) attempts to prevent undefined behavior, loading shared libraries is inherently unsafe. Below are some tips which you may find helpful so that your code is not exposed to undefined behavior.
//!
//! ### Avoid copying or moving data returned from `get()`
//! The [get](trait.Symbol.html#method.get) method on [Symbol](trait.Symbol.html) returns a transmuted pointer to something in a loaded library. While [sharedlib](index.html) tries to make sure that this pointer cannot outlive the library it is from, full protection is impossible. In particular: if a loaded `struct` contains pointers to things in the loaded library, and the loaded `struct` implements `Clone`, clients can clone the `struct` and make it to live longer than the library it is from. If this happens the pointers in the `struct` dangle. The example below demonstrate:
//!
//! ```norun
//! unsafe {
//! let some_func = {
//! let lib = try!(Lib::new("examplelib.dll"));
//! let some_func_symbol: Func<extern "C" fn()> = try!(lib.find_func(b"some_func"));
//! // All func pointers implement `Copy` so we can duplicate one.
//! some_func_symbol.get()
//! // lib goes out of scope here.
//! };
//! // Undefined behavior
//! some_func();
//! }
//! ```
//! ### Use the correct method when getting functions or data
//! Each library provides two different ways to get symbols from shared libraries. One way is `find_func`, and the other is `find_data`. Two functions are provded because `find_data` needs to return a reference to a `T` rather than a `T` itself, while `find_func` just needs to return a `T` itself. Returning the wrong thing can cause some complications. For instance: suppose we only have the `find_data` method, and we want to get a function pointer with the signature `fn()`. We are inclined to call `lib.find_data::<fn()>(b"some_func")`. This searches the memory of the loaded binary and finds the address of the first line of the function `some_func`. Next, the *contents* of the first line of `some_func` are treated as a function pointer rather than the *address* of the first line of `some_func`. When the first line of `some_func` is returned it is incorrectly cast into a function pointer. Calling it produces undefined behavior. The example below demonstrates:
//!
//! ```norun
//! unsafe {
//! let lib = try!(Lib::new("examplelib.dll"));
//! let some_func_symbol: Data<extern "C" fn()> = try!(lib.find_data(b"some_func"));
//! // some_func actually points to a function but rust thinks it points to a function pointer.
//! let some_func = some_func_symbol.get();
//! // Undefined behavior
//! some_func();
//! }
//! ```
//!
//! The correct way to do this with `find_data` is as follows:
//!
//! ```norun
//! unsafe {
//! let lib = try!(Lib::new("examplelib.dll"));
//! // Get a pointer to the block of memory at "some_func", this is the function itself.
//! let some_func_symbol: Data<u8> = try!(lib.find_data(b"some_func"));
//! // The type of some_func is &u8, a reference to the first byte of `some_func`. We can convert this into a function pointer.
//! let some_func = some_func_symbol.get();
//! let some_func_ptr: extern "C" fn() = std::mem::transmute(some_func);
//! // This works now.
//! some_func_ptr();
//! }
//! ```
//!
//! For convenience, the second example is provided as the `find_func` method, which does this error-prone conversion behind the scenes.
//!
//! # Comparison with other crates for loading shared libraries
//! sharedlib was created out of frustration with the existing crates for loading shared libraries. Below is a list of some of these crates with some information about how sharedlib improves upon them.
//!
//! * [dylib](https://crates.io/crates/dylib) provides an extremely simple interface for loading shared libraries. For awhile, this was the standard for loading shared libraries at runtime. Unfortunately, development on dylib has been mostly abandoned and it is no longer supported on the latest versions of the rust compiler.
//!
//! * [libloading](https://crates.io/crates/libloading) provides some additional safety guarantees on top of [dylib](https://crates.io/crates/dylib). [sharedlib](index.html) even started as a fork of [libloading](https://crates.io/crates/libloading). Unfortunately the interface [libloading](https://crates.io/crates/libloading) provides is extremely inflexible, requiring clients to transmute symbols so they can be used in `struct`s. Additionally, loading data does not work with this library which is a non-starter for many projects.
//!
//! # Frequently asked questions
//!
//! ### What is a shared library?
//! A shared library is a set of functions and variables which can be loaded after a program has been compiled. By loading a library after compilation, the library can be recompiled or changed without recompiling the main program. Shared libraries can even be loaded at runtime. Common shared library filetypes are *.dll* for windows, *.so* for unix, and *.dylib* for osx. For more information about what a shared library is, see [wikipedia](https://en.wikipedia.org/wiki/Library_(computing)#Shared_libraries).
//!
//! ### Doesn't rust already provide linking against shared libraries?
//! While rust provides linking against shared libraries, it does not provide the ability to load them at runtime. If you only want to use shared libraries that you know about before runtime, you may not find this crate very useful. On the other hand, if you wish to load something at runtime, like a plugin, you are in the right place.
#[macro_use]
extern crate define_error;
#[macro_use]
extern crate lazy_static;
#[cfg(windows)]
extern crate kernel32;
#[cfg(windows)]
extern crate winapi;
pub mod error;
mod os;
mod lib_impl;
mod string;
mod symbol;
#[cfg(test)]
mod test;
mod util;
pub use error::SharedlibError;
pub use error::SharedlibResult;
pub use lib_impl::Lib;
pub use lib_impl::LibArc;
pub use lib_impl::LibRc;
pub use lib_impl::LibTracked;
pub use lib_impl::LibUnsafe;
pub use symbol::Data;
pub use symbol::DataArc;
pub use symbol::DataRc;
pub use symbol::DataTracked;
pub use symbol::DataUnsafe;
pub use symbol::Func;
pub use symbol::FuncArc;
pub use symbol::FuncRc;
pub use symbol::FuncTracked;
pub use symbol::FuncUnsafe;
pub use symbol::Symbol;
|
//! Lua 5.1 bindings for Rust
#![crate_name = "lua"]
#![crate_type = "rlib"]
#![warn(missing_docs)]
#![allow(non_snake_case)]
#![feature(libc,core,std_misc,path,unicode,unsafe_no_drop_flag)]
extern crate libc;
use libc::c_int;
use std::{fmt, mem, ptr, str, slice};
use std::old_path as path;
use std::ffi::{self, CString};
use std::marker;
use std::num::SignedInt;
// Build-time configuration and raw C constants, widened to Rust-native
// types for use in the safe API.
/// Human-readable major version string
pub const VERSION: &'static str = config::LUA_VERSION;
/// Human-readable release version string
pub const RELEASE: &'static str = config::LUA_RELEASE;
/// Machine-readable version number
pub const VERSION_NUM: isize = config::LUA_VERSION_NUM as isize;
/// Value for lua_call that means return all results
pub const MULTRET: i32 = raw::MULTRET as i32;
/// Minimum Lua stack available to a C function
pub const MINSTACK: i32 = config::LUA_MINSTACK as i32;
/// Pseudo-index for the registry
pub const REGISTRYINDEX: i32 = raw::LUA_REGISTRYINDEX as i32;
/// Pseudo-index for the thread environment
pub const GLOBALSINDEX: i32 = raw::LUA_GLOBALSINDEX as i32;
/// Pseudo-index for the running C function environment
pub const ENVIRONINDEX: i32 = raw::LUA_ENVIRONINDEX as i32;
/// Calculates the pseudo-index for the upvalue at the given index.
/// Any index in the range [1,256] produces an acceptable index.
/// Any index outside that range will likely produce an unacceptable index.
#[inline]
pub fn upvalueindex(n: i32) -> i32 {
    raw::lua_upvalueindex(n as c_int) as i32
}
include!(concat!(env!("OUT_DIR"), "/config.rs"));
#[allow(missing_docs)]
pub mod raw;
#[allow(missing_docs)]
#[path="aux_.rs"]
pub mod aux;
#[path = "lualib.rs"]
#[allow(missing_docs)]
pub mod lib;
#[path="macro.rs"]
mod macros;
#[cfg(test)]
mod tests;
// Internal helper: raises a Lua error via errorstr() when `cond` is
// false. The variadic arm formats the message like format!().
macro_rules! luaassert{
($state:expr, $cond:expr, $msg:expr) => {
if !$cond {
$state.errorstr($msg.as_slice());
}
};
($state:expr, $cond:expr, $($arg:expr),+) => {
if !$cond {
let msg = format!($($arg),+);
$state.errorstr(msg.as_slice());
}
}
}
/// Lua value types
// Discriminants mirror the raw LUA_T* constants so a lua_type() result
// can be mapped directly onto this enum.
#[derive(Clone,Copy,PartialEq,Eq,Debug)]
pub enum Type {
/// Type for nil
Nil = raw::LUA_TNIL as isize,
/// Type for booleans
Boolean = raw::LUA_TBOOLEAN as isize,
/// Type for light userdata
LightUserdata = raw::LUA_TLIGHTUSERDATA as isize,
/// Type for numbers
Number = raw::LUA_TNUMBER as isize,
/// Type for strings
String = raw::LUA_TSTRING as isize,
/// Type for tables
Table = raw::LUA_TTABLE as isize,
/// Type for functions
Function = raw::LUA_TFUNCTION as isize,
/// Type for userdata
Userdata = raw::LUA_TUSERDATA as isize,
/// Type for threads
Thread = raw::LUA_TTHREAD as isize
}
impl Type {
/// Returns the name of the type
pub fn name(&self) -> &'static str {
unsafe {
// NB: lua_typename() doesn't actually use its state parameter
let s = raw::lua_typename(ptr::null_mut(), *self as libc::c_int);
// SAFETY(review): extending the borrow to 'static presumes the
// names live in static C storage — true for stock Lua 5.1.
mem::transmute::<&str,&'static str>(str::from_utf8(ffi::c_str_to_bytes(&s)).unwrap())
}
}
}
/// Garbage collection options (used with State.gc())
// Discriminants mirror the raw LUA_GC* constants.
//#[allow(dead_code)] // FIXME(rust-lang/rust#17632): dead_code warning is wrong here
#[derive(Copy)]
pub enum GC {
/// Stops the garbage collector
Stop = raw::LUA_GCSTOP as isize,
/// Restarts the garbage collector
Restart = raw::LUA_GCRESTART as isize,
/// Performs a full garbage-collection cycle
Collect = raw::LUA_GCCOLLECT as isize,
/// Returns the current amount of memory (in Kbytes) in use by Lua
Count = raw::LUA_GCCOUNT as isize,
/// Returns the remainder of dividing the current amount of bytes in memory in use by Lua
/// by 1024
CountB = raw::LUA_GCCOUNTB as isize,
/// Performs an incremental step of garbage collection. The step "size" is controlled by
/// `data` (larger values mean more steps) in a non-specified way. If you want to control
/// the step size you must experimentally tune the value of `data`. The function returns
/// 1 if the step finished a garbage-collection cycle.
Step = raw::LUA_GCSTEP as isize,
/// Sets `data` as the new value for the pause of the collector. The function returns the
/// previous value of the pause.
SetPause = raw::LUA_GCSETPAUSE as isize,
/// Sets `data` as the new value for the step multiplier of the collector. The function
/// returns the previous value of the step multiplier.
SetStepMul = raw::LUA_GCSETSTEPMUL as isize
}
// Thin re-exports of the raw callback types used throughout the safe API.
/// Type that represents C functions that can be registered with Lua.
pub type CFunction = raw::lua_CFunction;
/// Function type for reading blocks when loading Lua chunks.
pub type Reader = raw::lua_Reader;
/// Function type for writing blocks when dumping Lua chunks.
pub type Writer = raw::lua_Writer;
/// Type that represents memory-allocation functions
pub type Alloc = raw::lua_Alloc;
/// State.load() errors
// Discriminants mirror the raw LUA_ERR* codes returned by lua_load().
#[derive(Copy)]
pub enum LoadError {
/// Syntax error during pre-compilation
ErrSyntax = raw::LUA_ERRSYNTAX as isize,
/// Memory allocation error
ErrMem = raw::LUA_ERRMEM as isize
}
impl fmt::Debug for LoadError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
LoadError::ErrSyntax => f.pad("syntax error"),
LoadError::ErrMem => f.pad("memory allocation error")
}
}
}
/// State.loadfile() errors
// Same codes as LoadError plus the auxiliary library's file error.
#[derive(Copy)]
pub enum LoadFileError {
/// Syntax error during pre-compilation
ErrSyntax = raw::LUA_ERRSYNTAX as isize,
/// Memory allocation error
ErrMem = raw::LUA_ERRMEM as isize,
/// Cannot read/open the file
ErrFile = aux::raw::LUA_ERRFILE as isize
}
impl fmt::Debug for LoadFileError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
LoadFileError::ErrSyntax => f.pad("syntax error"),
LoadFileError::ErrMem => f.pad("memory allocation error"),
LoadFileError::ErrFile => f.pad("file read/open error")
}
}
}
/// State.pcall() errors
// Discriminants mirror the raw LUA_ERR* codes returned by lua_pcall().
#[derive(Copy)]
pub enum PCallError {
/// Runtime error
ErrRun = raw::LUA_ERRRUN as isize,
/// Memory allocation error
ErrMem = raw::LUA_ERRMEM as isize,
/// Error while running the error handler function
ErrErr = raw::LUA_ERRERR as isize
}
impl PCallError {
/// Converts an error code from `lua_pcall()` into a PCallError
// Returns None for 0 (success) or any unrecognized code.
pub fn from_code(code: c_int) -> Option<PCallError> {
match code {
raw::LUA_ERRRUN => Some(PCallError::ErrRun),
raw::LUA_ERRMEM => Some(PCallError::ErrMem),
raw::LUA_ERRERR => Some(PCallError::ErrErr),
_ => None,
}
}
}
impl fmt::Debug for PCallError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
PCallError::ErrRun => f.pad("runtime error"),
PCallError::ErrMem => f.pad("memory allocation error"),
PCallError::ErrErr => f.pad("error handler func error")
}
}
}
/// The Lua state.
/// Every Lua thread is represented by a separate State.
///
/// When executing functions on the State that take acceptable indexes, these
/// indexes are checked to ensure they are within the stack space defined by
/// the last call to State.checkstack(). If they are not acceptable, the
/// function fails without calling lua_checkstack(). Negative indices are
/// checked against the current top of the stack instead of the stack space.
///
/// Unless otherwise noted, all safe functions that take indexes will fail if
/// the index is not acceptable.
///
/// There are two variant state types, ExternState and RawState, that assume
/// different behavior for thrown errors. ExternState is meant for functions
/// that are executing in a protected scope (see pcall()), and RawState is
/// meant for omitting safety in favor of performance.
///
/// Note that it is completely unsafe to pass a reference to State to a
/// function that is executing in a protected scope. Use ExternState for that.
// repr(C) keeps the layout identical to ExternState/RawState so the
// as_extern()/as_raw() transmutes below are sound.
#[unsafe_no_drop_flag]
#[repr(C)]
pub struct State {
L: *mut raw::lua_State,
// Stack slots guaranteed by the last checkstack() call.
_stackspace: i32,
_marker: marker::PhantomData<raw::lua_State>
}
impl Drop for State {
fn drop(&mut self) {
// Null check guards against a second drop of a moved-out value
// (this type opts out of the drop flag via unsafe_no_drop_flag).
if !self.L.is_null() {
unsafe {
raw::lua_close(self.L);
}
self.L = ptr::null_mut();
}
}
}
/// ExternState is a Lua State that was created from a raw::lua_State value.
/// Every error-throwing function is assumed to be using longjmp instead of
/// task failure.
///
/// See State for more information.
// NB: layout must be identical to State
// If Drop is ever implemented, add unsafe_no_drop_flag
#[repr(C)]
pub struct ExternState<'a> {
L: *mut raw::lua_State,
// Stack slots guaranteed by the last checkstack() call.
stackspace: i32,
_marker: marker::PhantomData<&'a mut raw::lua_State>
}
/// RawState is a Lua State that represents raw, unchecked access. All
/// functions eschew safety in favor of speed. Like ExternState, all
/// error-throwing functions are assumed to be using longjmp.
// NB: layout must be identical to State
// If Drop is ever implemented, add unsafe_no_drop_flag
#[repr(C)]
pub struct RawState<'a> {
L: *mut raw::lua_State,
// Stack slots guaranteed by the last checkstack() call.
stackspace: i32,
_marker: marker::PhantomData<&'a mut raw::lua_State>
}
// State construction
impl State {
/// Returns a new State, or fails if memory cannot be allocated for the state
pub fn new() -> State {
#![inline]
State::new_opt().unwrap()
}
/// Returns a new State, or None if memory cannot be allocated for the state
pub fn new_opt() -> Option<State> {
return unsafe {
// Install our allocator and a panic hook before handing the
// state to safe code.
let L = raw::lua_newstate(alloc, ptr::null_mut());
if !L.is_null() {
raw::lua_atpanic(L, panic);
Some(State{ L: L, _stackspace: MINSTACK, _marker: marker::PhantomData })
} else {
None
}
};
// Lua allocator backed by the C heap: nsize == 0 frees, anything
// else (re)allocates via realloc (which mallocs when ptr is null).
extern "C" fn alloc(_ud: *mut libc::c_void, ptr: *mut libc::c_void, _osize: libc::size_t,
nsize: libc::size_t) -> *mut libc::c_void {
unsafe {
if nsize == 0 {
libc::free(ptr as *mut libc::c_void);
ptr::null_mut()
} else {
libc::realloc(ptr, nsize)
}
}
}
// Panic hook: turn an unprotected Lua error into a Rust panic with
// the error message from the top of the stack.
extern "C" fn panic(L: *mut raw::lua_State) -> c_int {
unsafe {
let s = RawState::from_lua_State(L).describe_(-1, false);
panic!("unprotected error in call to Lua API ({})", s);
}
}
}
}
impl<'l> ExternState<'l> {
    /// Wraps a *raw::lua_State in a ExternState.
    ///
    /// Unsafe: the caller must guarantee `L` is a valid lua_State that
    /// outlives the returned wrapper (the 'static lifetime is a claim the
    /// caller is responsible for, not something checked here).
    pub unsafe fn from_lua_State(L: *mut raw::lua_State) -> ExternState<'static> {
        #![inline]
        // MINSTACK free slots is assumed, per the Lua C API contract.
        ExternState{ L: L, stackspace: MINSTACK, _marker: marker::PhantomData }
    }
}
impl<'l> RawState<'l> {
    /// Wraps a *raw::lua_State in a RawState.
    ///
    /// Unsafe: the caller must guarantee `L` is a valid lua_State that
    /// outlives the returned wrapper.
    pub unsafe fn from_lua_State(L: *mut raw::lua_State) -> RawState<'static> {
        #![inline]
        // MINSTACK free slots is assumed, per the Lua C API contract.
        RawState{ L: L, stackspace: MINSTACK, _marker: marker::PhantomData }
    }
}
// State conversion
impl State {
    /// Returns the same state as an ExternState
    pub fn as_extern<'a>(&'a mut self) -> &'a mut ExternState<'a> {
        #![inline]
        // SAFETY: State and ExternState are both #[repr(C)] with identical
        // field layout (see the NB comments on the struct definitions).
        unsafe { mem::transmute(self) }
    }
    /// Returns the same state as a RawState
    pub fn as_raw<'a>(&'a mut self) -> &'a mut RawState<'a> {
        #![inline]
        // SAFETY: same identical-layout argument as as_extern().
        unsafe { mem::transmute(self) }
    }
}
impl<'a> ExternState<'a> {
    /// Returns the same state as a RawState
    // NOTE(review): the returned reference carries lifetime 'a rather than
    // the (shorter) borrow of `self`; the transmute extends the borrow.
    // Callers appear to rely on this — confirm before tightening.
    pub fn as_raw(&mut self) -> &'a mut RawState<'a> {
        #![inline]
        // SAFETY: ExternState and RawState are both #[repr(C)] with
        // identical field layout.
        unsafe { mem::transmute(self) }
    }
}
impl State {
    /// Provides unsafe access to the underlying *lua_State
    ///
    /// Unsafe: the caller must not close the state or otherwise invalidate
    /// the pointer while this State still owns it.
    pub unsafe fn get_lua_State(&mut self) -> *mut raw::lua_State {
        #![inline]
        self.L
    }
}
impl<'l> ExternState<'l> {
    /// Provides unsafe access to the underlying *lua_State
    ///
    /// Unsafe: the pointer must not be used beyond the wrapper's lifetime.
    pub unsafe fn get_lua_State(&mut self) -> *mut raw::lua_State {
        #![inline]
        self.L
    }
}
impl<'l> RawState<'l> {
    /// Provides unsafe access to the underlying *lua_State
    ///
    /// Unsafe: the pointer must not be used beyond the wrapper's lifetime.
    pub unsafe fn get_lua_State(&mut self) -> *mut raw::lua_State {
        #![inline]
        self.L
    }
}
// Safe wrapper layer: each method delegates to the checked ExternState
// implementation (which validates indices and stack space before calling
// into the raw layer). Bodies are wrapped in `unsafe` because the
// ExternState methods are declared unsafe (longjmp-based error handling),
// but the checks they perform are what make these State methods safe.
impl State {
    /// Creates a new thread, pushes it on the stack, and returns a `State`
    /// that represents this new thread. The new state returned by this
    /// function shares with the original state all global objects (such as
    /// tables), but has an independent execution stack.
    ///
    /// This new state does not get explicitly closed. Threads are subject to
    /// garbage collection, like any Lua object.
    pub fn newthread(&mut self) -> State {
        #![inline(always)]
        unsafe { self.as_raw().newthread() }
    }
    /// Sets a new panic function and returns the old one.
    ///
    /// The panic function can access the error message at the top of the stack.
    ///
    /// The default panic function installed by this library calls panic!() with
    /// the error message. Your panic function should either call through to
    /// the default one, or should panic!() itself. Otherwise, the application
    /// will be terminated.
    pub unsafe fn atpanic(&mut self, panicf: CFunction) -> CFunction {
        #![inline(always)]
        self.as_raw().atpanic(panicf)
    }
    /// Returns the textual description of the value at the given acceptable index.
    /// Returns "" if the given index is non-valid.
    pub fn describe(&mut self, idx: i32) -> String {
        #![inline(always)]
        unsafe { self.as_extern().describe(idx) }
    }
    /// Variant of describe_() that does not push on to the stack. describe()
    /// may push new values onto the stack temporarily. Notably, it may do this
    /// to avoid converting the existing value's type. This method allows this
    /// behavior to be disabled. If usestack is true, this method may require 1
    /// free slot on the stack.
    pub fn describe_(&mut self, idx: i32, usestack: bool) -> String {
        #![inline(always)]
        unsafe { self.as_extern().describe_(idx, usestack) }
    }
    /// Returns the index of the top element of the stack.
    /// Indexes start at 1. 0 means the stack is empty.
    pub fn gettop(&mut self) -> i32 {
        #![inline(always)]
        self.as_extern().gettop()
    }
    /// Sets the stack top to the given acceptable index, or 0.
    /// If the new top is larger than the old one, new elements are filled with
    /// nil.
    /// If the index is 0, all stack elements are removed.
    pub fn settop(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().settop(idx) }
    }
    /// Pushes a copy of the element at the given valid index onto the stack.
    pub fn pushvalue(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().pushvalue(idx) }
    }
    /// Removes the element at the given valid index, shifting other elements
    /// as needed.
    /// Pseudo-indices are not valid for this call.
    pub fn remove(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().remove(idx) }
    }
    /// Moves the top element into the given valid index, shifting existing
    /// elements as needed.
    /// Pseudo-indices are not valid for this call.
    pub fn insert(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().insert(idx) }
    }
    /// Moves the top element into the given valid index and replaces the
    /// existing value, without shifting any other elements.
    pub fn replace(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().replace(idx) }
    }
    /// Ensures the stack contains at least `extra` free slots on the stack.
    /// Returns false if it cannot grow the stack as requested.
    pub fn checkstack(&mut self, extra: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().checkstack(extra) }
    }
    /// Ensures the stack contains at least `extra` free slots on the stack.
    /// Throws an error if it cannot grow the stack.
    pub fn checkstack_(&mut self, extra: i32) {
        #![inline(always)]
        unsafe { self.as_extern().checkstack_(extra) }
    }
    /// Exchanges values between different threads of the same global state.
    /// This method pops n values from the stack `self`, and pushes them to the
    /// stack `to`.
    ///
    /// Note: this method is unsafe because it cannot check to ensure that both
    /// threads belong to the same global state.
    ///
    /// Despite being unsafe, it still checks the validity of `n`.
    pub unsafe fn xmove(&mut self, to: &mut State, n: i32) {
        #![inline(always)]
        self.as_extern().xmove(to.as_extern(), n)
    }
    /// Returns `true` if the value at the given acceptable index is a number,
    /// or a string convertible to a number.
    pub fn isnumber(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().isnumber(idx) }
    }
    /// Returns `true` if the value at the given acceptable index is a string
    /// or a number (which is always convertible to a string).
    pub fn isstring(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().isstring(idx) }
    }
    /// Returns `true` if the value at the given acceptable index is a C
    /// function.
    pub fn iscfunction(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().iscfunction(idx) }
    }
    /// Returns `true` if the value at the given acceptable index is a userdata
    /// (either full or light).
    pub fn isuserdata(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().isuserdata(idx) }
    }
    /// Returns the type of the value at the given acceptable index. If the
    /// given index is non-valid, returns None.
    pub fn type_(&mut self, idx: i32) -> Option<Type> {
        #![inline(always)]
        unsafe { self.as_extern().type_(idx) }
    }
    /// Returns the name of the type of the value at the given acceptable
    /// index.
    pub fn typename(&mut self, idx: i32) -> &'static str {
        #![inline(always)]
        unsafe { self.as_extern().typename(idx) }
    }
    /// Returns `true` if the two values in acceptable indices `index1` and
    /// `index2` are equal, following the semantics of the Lua == operator.
    /// Returns `false` if any indices are non-valid.
    pub fn equal(&mut self, index1: i32, index2: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().equal(index1, index2) }
    }
    /// Returns `true` if the two values in acceptable indices `index1` and
    /// `index2` are primitively equal (that is, without calling any
    /// metamethods). Returns `false` if any indices are non-valid.
    pub fn rawequal(&mut self, index1: i32, index2: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().rawequal(index1, index2) }
    }
    /// Returns `true` if the value at acceptable index `index1` is smaller
    /// than the value at acceptable index `index2`, following the semantics of
    /// the Lua < operator. Returns `false` if any indices are non-valid.
    pub fn lessthan(&mut self, index1: i32, index2: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().lessthan(index1, index2) }
    }
    /// Converts the Lua value at the given acceptable index to a f64. The Lua
    /// value must be a number or a string convertible to a number; otherwise,
    /// tonumber returns 0.
    pub fn tonumber(&mut self, idx: i32) -> f64 {
        #![inline(always)]
        unsafe { self.as_extern().tonumber(idx) }
    }
    /// Converts the Lua value at the given acceptable index to an isize. The Lua
    /// value must be a number or a string convertiable to a number; otherwise,
    /// toint returns 0.
    pub fn tointeger(&mut self, idx: i32) -> isize {
        #![inline(always)]
        unsafe { self.as_extern().tointeger(idx) }
    }
    /// Converts the value at the given acceptable index to a bool.
    /// Returns false when called with a non-valid index.
    pub fn toboolean(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().toboolean(idx) }
    }
    /// Converts the value at the given acceptable index to a string.
    ///
    /// Returns None if the value is not a number or a string.
    /// Returns None if the string value is not utf-8.
    ///
    /// Note: if the value is a number, this method changes the value in the
    /// stack to a string. This may confuse lua_next if this is called during
    /// table traversal.
    pub fn tostring<'a>(&'a mut self, idx: i32) -> Option<&'a str> {
        #![inline(always)]
        // The transmute shrinks the 'static lifetime that ExternState::tostring
        // claims down to the borrow of self (see the note on that method).
        unsafe { mem::transmute(self.as_extern().tostring(idx)) }
    }
    /// Converts the value at the given acceptable index into a lua string, and
    /// returns it as a byte vector.
    /// Returns None if the value is not a number or a string.
    /// See tostring() for caveats.
    pub fn tobytes<'a>(&'a mut self, idx: i32) -> Option<&'a [u8]> {
        #![inline(always)]
        // Same lifetime-shrinking transmute as tostring().
        unsafe { mem::transmute(self.as_extern().tobytes(idx)) }
    }
    /// Returns the "length" of the value at the given acceptable index.
    pub fn objlen(&mut self, idx: i32) -> usize {
        #![inline(always)]
        unsafe { self.as_extern().objlen(idx) }
    }
    /// Converts a value at the given acceptable index to a C function. The
    /// value must be a C function; otherwise, returns None.
    pub fn tocfunction(&mut self, idx: i32) -> Option<CFunction> {
        #![inline(always)]
        unsafe { self.as_extern().tocfunction(idx) }
    }
    /// If the value at the given acceptable index is a full userdata, returns
    /// its block address. If the value is a light userdata, returns its
    /// pointer. Otherwise, returns ptr::null().
    pub fn touserdata(&mut self, idx: i32) -> *mut libc::c_void {
        #![inline(always)]
        unsafe { self.as_extern().touserdata(idx) }
    }
    /// Converts the value at the given acceptable index to a Lua thread
    /// (represented as a State). This value must be a thread; otherwise, the
    /// method returns None.
    ///
    /// Note: the State return value does not make any assumptions about the
    /// available stack space. .checkstack() must be called in order to
    /// consider any non-valid index as acceptable.
    pub fn tothread(&mut self, idx: i32) -> Option<State> {
        #![inline(always)]
        // NOTE(review): this transmutes Option<ExternState> into Option<State>,
        // and State's Drop calls lua_close. Threads are supposed to be
        // garbage-collected, not closed (see newthread's docs) — confirm the
        // intended drop semantics of the returned State.
        unsafe { mem::transmute(self.as_extern().tothread(idx)) }
    }
    /// Converts the value at the given acceptable index to a pointer. The
    /// value can be a userdata, a table, a thread, or a function.
    pub fn topointer(&mut self, idx: i32) -> *const libc::c_void {
        #![inline(always)]
        unsafe { self.as_extern().topointer(idx) }
    }
    /// Pushes a nil value onto the stack.
    pub fn pushnil(&mut self) {
        #![inline(always)]
        unsafe { self.as_extern().pushnil() }
    }
    /// Pushes a number with value `n` onto the stack
    pub fn pushnumber(&mut self, n: f64) {
        #![inline(always)]
        unsafe { self.as_extern().pushnumber(n) }
    }
    /// Pushes a number with value `n` onto the stack.
    pub fn pushinteger(&mut self, n: isize) {
        #![inline(always)]
        unsafe { self.as_extern().pushinteger(n) }
    }
    /// Pushes a string onto the stack
    pub fn pushstring(&mut self, s: &str) {
        #![inline(always)]
        unsafe { self.as_extern().pushstring(s) }
    }
    /// Pushes a byte vector onto the stack as a lua string
    pub fn pushbytes(&mut self, bytes: &[u8]) {
        #![inline(always)]
        unsafe { self.as_extern().pushbytes(bytes) }
    }
    /// Pushes a new C closure onto the stack.
    ///
    /// When a C function is created, it is possible to associate some values
    /// with it, thus creating a C closure; these values are then accessible to
    /// the function whenever it is called. These values must be pushed onto
    /// the stack (in order), then pushclosure() is called to create and push
    /// the C closure onto the stack. The argument `n` is the number of values
    /// that should be associated with the function. These values are popped
    /// from the stack.
    ///
    /// `n` must be in the range [0, 255]. Anything outside this range will
    /// throw an error.
    pub fn pushcclosure(&mut self, f: CFunction, n: i32) {
        #![inline(always)]
        unsafe { self.as_extern().pushcclosure(f, n) }
    }
    /// Pushes a boolean value onto the stack.
    pub fn pushboolean(&mut self, b: bool) {
        #![inline(always)]
        unsafe { self.as_extern().pushboolean(b) }
    }
    /// Pushes a light userdata onto the stack.
    pub fn pushlightuserdata(&mut self, p: *mut libc::c_void) {
        #![inline(always)]
        unsafe { self.as_extern().pushlightuserdata(p) }
    }
    /// Pushes the thread represented by `self` onto the stack. Returns `true`
    /// if this thread is the main thread of the state.
    pub fn pushthread(&mut self) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().pushthread() }
    }
    /// Pushes onto the stack the value t[k], where t is the value at the given
    /// valid index and k is the value at the top of the stack. The key is
    /// popped from the stack.
    pub fn gettable(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().gettable(idx) }
    }
    /// Pushes onto the stack the value t[k], where t is the value at the given
    /// valid index. Fails the task if `k` has any interior NULs.
    pub fn getfield(&mut self, idx: i32, k: &str) {
        #![inline(always)]
        unsafe { self.as_extern().getfield(idx, k) }
    }
    /// Similar to gettable(), but does a raw access
    pub fn rawget(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().rawget(idx) }
    }
    /// Pushes onto the stack the value t[n], where t is the value at the given
    /// valid index. The access is raw; that is, it does not invoke
    /// metamethods.
    pub fn rawgeti(&mut self, idx: i32, n: i32) {
        #![inline(always)]
        unsafe { self.as_extern().rawgeti(idx, n) }
    }
    /// Creates a new empty table and pushes it into the stack. The new table
    /// has space pre-allocated for `narr` array elements and `nrec` non-array
    /// elements.
    pub fn createtable(&mut self, narr: i32, nrec: i32) {
        #![inline(always)]
        unsafe { self.as_extern().createtable(narr, nrec) }
    }
    /// This method allocates a new block of memory with the given size, pushes
    /// onto the stack a new full userdata with the block address, and returns
    /// this address.
    pub fn newuserdata(&mut self, size: usize) -> *mut libc::c_void {
        #![inline(always)]
        unsafe { self.as_extern().newuserdata(size) }
    }
    /// Pushes onto the stack the metatable of the value at the given
    /// acceptable index. If the index is not valid, or the value does not have
    /// a metatable, the function returns `false` and pushes nothing onto the
    /// stack.
    pub fn getmetatable(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().getmetatable(idx) }
    }
    /// Pushes onto the stack the environment table of the value at the given
    /// index.
    pub fn getfenv(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().getfenv(idx) }
    }
    /// Does the equivalent to t[k] = v, where t is the value at the given
    /// valid index, v is the value at the top of the stack, and k is the value
    /// just below the top.
    ///
    /// This function pops both the key and the value from the stack.
    pub fn settable(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().settable(idx) }
    }
    /// Does the equivalent to t[k] = v, where t is the value at the given
    /// valid index and v is the value at the top of the stack.
    ///
    /// This function pops the value from the stack.
    ///
    /// Fails the task if `k` contains interior NULs.
    pub fn setfield(&mut self, idx: i32, k: &str) {
        #![inline(always)]
        unsafe { self.as_extern().setfield(idx, k) }
    }
    /// Similar to settable(), but does a raw assignment.
    pub fn rawset(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().rawset(idx) }
    }
    /// Does the equivalent of t[n] = v, where t is the value at the given
    /// valid index and v is the value at the top of the stack.
    ///
    /// This function pops the value from the stack. The assignment is raw;
    /// that is, it does not invoke metamethods.
    pub fn rawseti(&mut self, idx: i32, n: i32) {
        #![inline(always)]
        unsafe { self.as_extern().rawseti(idx, n) }
    }
    /// Pops a table from the stack and sets it as the new metatable for the
    /// value at the given acceptable index.
    pub fn setmetatable(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().setmetatable(idx) }
    }
    /// Pops a table from the stack and sets it as the new environment for the
    /// value at the given index. If the value at the given index is neither a
    /// function nor a thread nor a userdata, setfenv() returns `false`.
    /// Otherwise, returns `true`.
    pub fn setfenv(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().setfenv(idx) }
    }
    /// Calls a function.
    /// The function must be pushed first, followed by its arguments. `nargs`
    /// is the number of arguments. The function and its arguments are popped
    /// automatically.
    /// The function results are adjusted to `nresults`, unless `nresults` is
    /// `MULTRET`, in which case all function results are pushed.
    pub fn call(&mut self, nargs: i32, nresults: i32) {
        #![inline(always)]
        unsafe { self.as_extern().call(nargs, nresults) }
    }
    /// Calls a function in protected mode.
    ///
    /// If no error occurs, this behaves identically to call() and returns
    /// Ok(()). If there is any error, the error message is pushed onto the
    /// stack, and an error code is returned. The function and its arguments
    /// are always removed from the stack.
    ///
    /// If `errfunc` is 0, then the error message returned on the stack is
    /// exactly the original error message. Otherwise, `errfunc` is the stack
    /// index of an error handler function. It must not be a pseudo-index.
    pub fn pcall(&mut self, nargs: i32, nresults: i32, errfunc: i32) -> Result<(),PCallError> {
        #![inline(always)]
        unsafe { self.as_extern().pcall(nargs, nresults, errfunc) }
    }
    /// Loads a Lua chunk. If there are no errors, load() pushes the compiled
    /// chunk as a Lua function on top of the stack. Otherwise, it pushes an
    /// error message.
    ///
    /// This method only loads a chunk; it does not run it.
    ///
    /// load() automatically detects whether the chunk is text or binary, and
    /// loads it accordingly.
    ///
    /// The load() method uses a user-supplied `reader` function to read the
    /// chunk. The `data` argument is an opaque value passed to the reader
    /// function.
    ///
    /// The `chunkname` argument gives a name to the chunk, which is used for
    /// error messages and in debug information.
    ///
    /// Fails the task if `chunkname` contains interior NULs.
    pub fn load(&mut self, reader: Reader, data: *mut libc::c_void, chunkname: &str)
               -> Result<(),LoadError> {
        #![inline(always)]
        unsafe { self.as_extern().load(reader, data, chunkname) }
    }
    /// Dumps a function as a binary chunk. Receives a Lua function on the top
    /// of the stack and produces a binary chunk that, if loaded again, results
    /// in a function equivalent to the one dumped. As it produces parts of the
    /// chunk, dump() calls function `writer` with the given `data` to write
    /// them.
    ///
    /// The value returned is the error code returned by the last call to the
    /// writer; Ok(()) means no errors.
    ///
    /// This function does not pop the Lua function from the stack.
    pub fn dump(&mut self, writer: Writer, data: *mut libc::c_void) -> Result<(),i32> {
        #![inline(always)]
        unsafe { self.as_extern().dump(writer, data) }
    }
    /// Yields a coroutine.
    ///
    /// This function should only be called as the return expression of a C
    /// function, as follows:
    ///
    ///   return L.yield_(nresults);
    ///
    /// When a C function calls yield_() in that way, the running coroutine
    /// suspends its execution, and the call to resume() that started this
    /// coroutine returns. The parameter `nresults` is the number of values
    /// from the stack that are passed as the results to resume().
    pub fn yield_(&mut self, nresults: i32) -> c_int {
        #![inline(always)]
        unsafe { self.as_extern().yield_(nresults) }
    }
    /// Starts and resumes a coroutine in a given thread.
    ///
    /// To start a coroutine, you first create a new thread (see thread());
    /// then you push onto its stack the main function plus any arguments; then
    /// you call resume(), with `narg` being the number of arguments. This call
    /// returns when the coroutine suspends or finishes its execution. When it
    /// returns, the stack contains all values passed to yield_(), or all
    /// values returned by the body function. resume() returns Ok(false) if the
    /// coroutine yields, Ok(true) if the coroutine finishes its execution
    /// without errors, or Err(PCallError) in case of errors. In case of
    /// errors, the stack is not unwound, so you can use the debug API over it.
    /// The error message is on top of the stack. To restart a coroutine, you
    /// put on its stack only the values to be passed as results from yield_(),
    /// and then call resume().
    pub fn resume(&mut self, narg: i32) -> Result<bool,PCallError> {
        #![inline(always)]
        unsafe { self.as_extern().resume(narg) }
    }
    /// Returns the status of the receiving thread.
    ///
    /// The status can be Ok(true) for a normal thread, Ok(false) if the thread
    /// is suspended, or Err(PCallError) if the thread finished its execution
    /// with an error.
    pub fn status(&mut self) -> Result<bool,PCallError> {
        #![inline(always)]
        unsafe { self.as_extern().status() }
    }
    /// Controls the garbage collector.
    ///
    /// This method performs several tasks, according to the value of the
    /// parameter `what`. See the `GC` enum for documentation on the various
    /// options.
    pub fn gc(&mut self, what: GC, data: i32) -> i32 {
        #![inline(always)]
        unsafe { self.as_extern().gc(what, data) }
    }
    /// Raises an error (using the value at the top of the stack)
    pub fn error(&mut self) -> ! {
        #![inline(always)]
        unsafe { self.as_extern().error() }
    }
    /// Pops a key from the stack, and pushes a key-value pair from the table
    /// at the given index (the "next" pair after the given key). If there are
    /// no more elements in the table, then next() returns false (and pushes
    /// nothing).
    ///
    /// A typical traversal looks like this:
    ///
    ///   /* table is in the stack at index 't' */
    ///   L.pushnil(); // first key
    ///   while L.next(t) {
    ///     /* uses 'key' (at index -2) and 'value' (at index -1) */
    ///     println!("{} - {}", L.typename(-2), L.typename(-1));
    ///     /* removes 'value'; keeps 'key' for next iteration */
    ///     L.pop(1);
    ///   }
    ///
    /// While traversing a table, do not call tostring() or tobytes() directly
    /// on a key, unless you know that the key is actually a string. Recall
    /// that tostring() changes the value at the given index; this confuses the
    /// next call to next().
    pub fn next(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().next(idx) }
    }
    /// Concatenates the `n` values at the top of the stack, pops them, and
    /// leaves the result at the top.
    /// Errors if n is negative or larger than the stack top.
    pub fn concat(&mut self, n: i32) {
        #![inline(always)]
        unsafe { self.as_extern().concat(n) }
    }
    /// Returns the memory-allocation function of a given state. If `ud` is not
    /// NULL, Lua stores in `*ud` the opaque pointer passed to lua_newstate().
    ///
    /// Note: State::new() always provides NULL as the opaque pointer. It also
    /// provides a default alloc function that behaves identically to the one
    /// used by luaL_newstate().
    pub unsafe fn getallocf(&mut self, ud: *mut *mut libc::c_void) -> Alloc {
        #![inline(always)]
        self.as_extern().getallocf(ud)
    }
    /// Changes the allocator function of a given state to `f` with user data
    /// `ud`.
    pub unsafe fn setallocf(&mut self, f: Alloc, ud: *mut libc::c_void) {
        #![inline(always)]
        self.as_extern().setallocf(f, ud)
    }
    /// Pop n elements from the stack.
    /// Errors if the stack is smaller than n
    pub fn pop(&mut self, n: i32) {
        #![inline(always)]
        unsafe { self.as_extern().pop(n) }
    }
    /// Creates a new empty table and pushes it onto the stack.
    /// It is equivalent to .createtable(0, 0).
    pub fn newtable(&mut self) {
        #![inline(always)]
        unsafe { self.as_extern().newtable() }
    }
    /// Sets the C function `f` as the new value of global `name`.
    /// Fails the task if `name` has interior NULs.
    pub fn register(&mut self, name: &str, f: CFunction) {
        #![inline(always)]
        unsafe { self.as_extern().register(name, f) }
    }
    /// Pushes a C function onto the stack.
    pub fn pushcfunction(&mut self, f: CFunction) {
        #![inline(always)]
        unsafe { self.as_extern().pushcfunction(f) }
    }
    /// Returns `true` if the value at the given acceptable index is a function
    /// (either C or Lua).
    pub fn isfunction(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().isfunction(idx) }
    }
    /// Returns `true` if the value at the given acceptable index is a table.
    pub fn istable(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().istable(idx) }
    }
    /// Returns `true` if the value at the given acceptable index is a light
    /// userdata.
    pub fn islightuserdata(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().islightuserdata(idx) }
    }
    /// Returns `true` if the value at the given acceptable index is `nil`.
    pub fn isnil(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().isnil(idx) }
    }
    /// Returns `true` if the value at the given acceptable index has type
    /// boolean.
    pub fn isboolean(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().isboolean(idx) }
    }
    /// Returns `true` if the value at the given acceptable index is a thread.
    pub fn isthread(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().isthread(idx) }
    }
    /// Returns `true` if the given acceptable index is not valid.
    pub fn isnone(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().isnone(idx) }
    }
    /// Returns `true` if the given acceptable index is not valid or if the
    /// value at this index is nil.
    pub fn isnoneornil(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().isnoneornil(idx) }
    }
    /// Pops a value from the stack and sets it as the new value of global
    /// `name`. Fails the task if `name` has interior NULs.
    pub fn setglobal(&mut self, name: &str) {
        #![inline(always)]
        unsafe { self.as_extern().setglobal(name) }
    }
    /// Pushes onto the stack the value of the global `name`.
    /// Fails the task if `name` has interior NULs.
    pub fn getglobal(&mut self, name: &str) {
        #![inline(always)]
        unsafe { self.as_extern().getglobal(name) }
    }
}
#[allow(missing_docs)]
impl<'l> ExternState<'l> {
pub unsafe fn newthread(&mut self) -> State {
self.as_raw().newthread()
}
pub unsafe fn atpanic(&mut self, panicf: CFunction) -> CFunction {
self.as_raw().atpanic(panicf)
}
unsafe fn check_acceptable(&mut self, idx: i32) {
if idx > 0 {
luaassert!(self, idx <= self.stackspace,
"index {} is not acceptable (stack space is {})", idx, self.stackspace);
} else if idx < 0 {
self.check_valid(idx, true);
} else {
self.errorstr("index 0 is not acceptable");
}
}
unsafe fn check_valid(&mut self, idx: i32, allowpseudo: bool) {
match idx {
0 => self.errorstr("index 0 is not valid"),
GLOBALSINDEX |
REGISTRYINDEX |
ENVIRONINDEX => luaassert!(self, allowpseudo,
"Pseudo-indices are not valid for this call"),
_ if idx < GLOBALSINDEX => {
luaassert!(self, allowpseudo, "Pseudo-indices are not valid for this call");
// we can't actually test for upvalue validity
// at least not without using lua_Debug, which seems excessive.
// However, I think that invalid but acceptable upvalues are treated as nil
let upvalidx = GLOBALSINDEX - idx;
luaassert!(self, upvalidx <= 256, "upvalue index {} is out of range", upvalidx);
}
_ => {
let top = self.gettop();
luaassert!(self, idx.abs() <= top, "index {} is not valid (stack top is {})", idx,
top);
}
}
}
pub unsafe fn describe(&mut self, idx: i32) -> String {
self.check_acceptable(idx);
self.checkstack_(1);
self.as_raw().describe(idx)
}
pub unsafe fn describe_(&mut self, idx: i32, usestack: bool) -> String {
self.check_acceptable(idx);
if usestack { self.checkstack_(1); }
self.as_raw().describe_(idx, usestack)
}
pub fn gettop(&mut self) -> i32 {
self.as_raw().gettop()
}
pub unsafe fn settop(&mut self, idx: i32) {
if idx != 0 { self.check_acceptable(idx); }
self.as_raw().settop(idx);
}
pub unsafe fn pushvalue(&mut self, idx: i32) {
self.check_valid(idx, true);
self.checkstack_(1);
self.as_raw().pushvalue(idx)
}
pub unsafe fn remove(&mut self, idx: i32) {
self.check_valid(idx, false);
self.as_raw().remove(idx)
}
pub unsafe fn insert(&mut self, idx: i32) {
self.check_valid(idx, false);
self.as_raw().insert(idx)
}
pub unsafe fn replace(&mut self, idx: i32) {
self.check_valid(idx, true);
self.as_raw().replace(idx)
}
pub unsafe fn checkstack(&mut self, extra: i32) -> bool {
self.as_raw().checkstack(extra)
}
pub unsafe fn checkstack_(&mut self, extra: i32) {
self.as_raw().checkstack_(extra)
}
pub unsafe fn xmove(&mut self, to: &mut ExternState, n: i32) {
luaassert!(self, self.gettop() >= n, "xmove: stack underflow");
to.checkstack_(1);
self.as_raw().xmove(to.as_raw(), n)
}
// Checked wrappers: each method validates its index arguments
// (check_acceptable / check_valid) and/or ensures there is room on the
// Lua stack (checkstack_ / luaassert!) before delegating to the
// corresponding RawState method.
pub unsafe fn isnumber(&mut self, idx: i32) -> bool {
    self.check_acceptable(idx);
    self.as_raw().isnumber(idx)
}
pub unsafe fn isstring(&mut self, idx: i32) -> bool {
    self.check_acceptable(idx);
    self.as_raw().isstring(idx)
}
pub unsafe fn iscfunction(&mut self, idx: i32) -> bool {
    self.check_acceptable(idx);
    self.as_raw().iscfunction(idx)
}
pub unsafe fn isuserdata(&mut self, idx: i32) -> bool {
    self.check_acceptable(idx);
    self.as_raw().isuserdata(idx)
}
// Returns None when there is no value at the (acceptable) index.
pub unsafe fn type_(&mut self, idx: i32) -> Option<Type> {
    self.check_acceptable(idx);
    self.as_raw().type_(idx)
}
pub unsafe fn typename(&mut self, idx: i32) -> &'static str {
    self.check_acceptable(idx);
    self.as_raw().typename(idx)
}
pub unsafe fn equal(&mut self, index1: i32, index2: i32) -> bool {
    self.check_acceptable(index1);
    self.check_acceptable(index2);
    self.as_raw().equal(index1, index2)
}
pub unsafe fn rawequal(&mut self, index1: i32, index2: i32) -> bool {
    self.check_acceptable(index1);
    self.check_acceptable(index2);
    self.as_raw().rawequal(index1, index2)
}
pub unsafe fn lessthan(&mut self, index1: i32, index2: i32) -> bool {
    self.check_acceptable(index1);
    self.check_acceptable(index2);
    self.as_raw().lessthan(index1, index2)
}
// Conversions: these do not validate the value's type, only the index.
pub unsafe fn tonumber(&mut self, idx: i32) -> f64 {
    self.check_acceptable(idx);
    self.as_raw().tonumber(idx)
}
pub unsafe fn tointeger(&mut self, idx: i32) -> isize {
    self.check_acceptable(idx);
    self.as_raw().tointeger(idx)
}
pub unsafe fn toboolean(&mut self, idx: i32) -> bool {
    self.check_acceptable(idx);
    self.as_raw().toboolean(idx)
}
/// Note: the string is returned as 'static to prevent borrowing the
/// ExternState, but its lifetime is actually that of the value on the
/// stack.
pub unsafe fn tostring(&mut self, idx: i32) -> Option<&'static str> {
    self.check_acceptable(idx);
    self.as_raw().tostring(idx)
}
/// Note: the byte vector is returned as 'static to prevent borrowing the
/// ExternState, but its lifetime is actually that of the value on the
/// stack.
pub unsafe fn tobytes(&mut self, idx: i32) -> Option<&'static [u8]> {
    self.check_acceptable(idx);
    self.as_raw().tobytes(idx)
}
pub unsafe fn objlen(&mut self, idx: i32) -> usize {
    self.check_acceptable(idx);
    self.as_raw().objlen(idx)
}
pub unsafe fn tocfunction(&mut self, idx: i32) -> Option<CFunction> {
    self.check_acceptable(idx);
    self.as_raw().tocfunction(idx)
}
pub unsafe fn touserdata(&mut self, idx: i32) -> *mut libc::c_void {
    self.check_acceptable(idx);
    self.as_raw().touserdata(idx)
}
pub unsafe fn tothread(&mut self, idx: i32) -> Option<ExternState> {
    self.check_acceptable(idx);
    self.as_raw().tothread(idx)
}
pub unsafe fn topointer(&mut self, idx: i32) -> *const libc::c_void {
    self.check_acceptable(idx);
    self.as_raw().topointer(idx)
}
// Push operations: each pushes exactly one value, so one free slot is
// reserved up front.
pub unsafe fn pushnil(&mut self) {
    self.checkstack_(1);
    self.as_raw().pushnil()
}
pub unsafe fn pushnumber(&mut self, n: f64) {
    self.checkstack_(1);
    self.as_raw().pushnumber(n)
}
pub unsafe fn pushinteger(&mut self, n: isize) {
    self.checkstack_(1);
    self.as_raw().pushinteger(n)
}
pub unsafe fn pushstring(&mut self, s: &str) {
    self.checkstack_(1);
    self.as_raw().pushstring(s)
}
pub unsafe fn pushbytes(&mut self, bytes: &[u8]) {
    self.checkstack_(1);
    self.as_raw().pushbytes(bytes)
}
pub unsafe fn pushcclosure(&mut self, f: CFunction, n: i32) {
    // With n == 0 nothing is consumed, so one slot is needed for the
    // closure itself; otherwise the n upvalues already on the stack are
    // consumed and only the argument range is validated.
    if n == 0 {
        self.checkstack_(1);
    } else {
        luaassert!(self, n >= 0 && n <= 255, "pushcclosure: invalid argument n");
    }
    self.as_raw().pushcclosure(f, n)
}
pub unsafe fn pushboolean(&mut self, b: bool) {
    self.checkstack_(1);
    self.as_raw().pushboolean(b)
}
pub unsafe fn pushlightuserdata(&mut self, p: *mut libc::c_void) {
    self.checkstack_(1);
    self.as_raw().pushlightuserdata(p)
}
pub unsafe fn pushthread(&mut self) -> bool {
    self.checkstack_(1);
    self.as_raw().pushthread()
}
// Table reads: the key on top of the stack is replaced by the result,
// so no extra slot is needed when a key is consumed; getfield/rawgeti
// push a result without consuming, hence checkstack_(1).
pub unsafe fn gettable(&mut self, idx: i32) {
    self.check_valid(idx, true);
    luaassert!(self, self.gettop() > 0, "gettable: stack underflow");
    self.as_raw().gettable(idx)
}
pub unsafe fn getfield(&mut self, idx: i32, k: &str) {
    self.check_valid(idx, true);
    self.checkstack_(1);
    self.as_raw().getfield(idx, k)
}
pub unsafe fn rawget(&mut self, idx: i32) {
    self.check_valid(idx, true);
    luaassert!(self, self.gettop() > 0, "rawget: stack underflow");
    self.as_raw().rawget(idx)
}
pub unsafe fn rawgeti(&mut self, idx: i32, n: i32) {
    self.check_valid(idx, true);
    self.checkstack_(1);
    self.as_raw().rawgeti(idx, n)
}
pub unsafe fn createtable(&mut self, narr: i32, nrec: i32) {
    self.checkstack_(1);
    self.as_raw().createtable(narr, nrec)
}
pub unsafe fn newuserdata(&mut self, size: usize) -> *mut libc::c_void {
    self.checkstack_(1);
    self.as_raw().newuserdata(size)
}
pub unsafe fn getmetatable(&mut self, idx: i32) -> bool {
    self.check_acceptable(idx);
    self.checkstack_(1);
    self.as_raw().getmetatable(idx)
}
pub unsafe fn getfenv(&mut self, idx: i32) {
    self.check_acceptable(idx);
    self.checkstack_(1);
    self.as_raw().getfenv(idx)
}
// Table writes: settable/rawset consume a key and a value (2 slots);
// setfield consumes only the value (1 slot).
pub unsafe fn settable(&mut self, idx: i32) {
    self.check_valid(idx, true);
    luaassert!(self, self.gettop() >= 2, "settable: stack underflow");
    self.as_raw().settable(idx)
}
pub unsafe fn setfield(&mut self, idx: i32, k: &str) {
    self.check_valid(idx, true);
    luaassert!(self, self.gettop() >= 1, "setfield: stack underflow");
    self.as_raw().setfield(idx, k)
}
pub unsafe fn rawset(&mut self, idx: i32) {
    self.check_valid(idx, true);
    luaassert!(self, self.gettop() >= 2, "rawset: stack underflow");
    self.as_raw().rawset(idx)
}
pub unsafe fn rawseti(&mut self, idx: i32, n: i32) {
    self.check_valid(idx, true);
    self.as_raw().rawseti(idx, n)
}
pub unsafe fn setmetatable(&mut self, idx: i32) {
    self.check_acceptable(idx);
    luaassert!(self, self.istable(-1), "setmetatable: top stack value must be a table");
    self.as_raw().setmetatable(idx)
}
pub unsafe fn setfenv(&mut self, idx: i32) -> bool {
    self.check_acceptable(idx);
    luaassert!(self, self.istable(-1), "setfenv: top stack value must be a table");
    self.as_raw().setfenv(idx)
}
/// Calls the function at the top of the stack (below its `nargs`
/// arguments), producing `nresults` values (or all of them for MULTRET).
///
/// The call consumes the function plus `nargs` arguments and pushes
/// `nresults` values, so extra stack space is only needed when the
/// results outnumber what was popped, i.e. `nresults - (nargs + 1)` slots.
pub unsafe fn call(&mut self, nargs: i32, nresults: i32) {
    luaassert!(self, nargs >= 0, "call: invalid nargs");
    luaassert!(self, nresults == MULTRET || nresults >= 0, "call: invalid nresults");
    luaassert!(self, self.gettop() > nargs, "call: stack underflow");
    // BUGFIX: the previous code requested `nargs - nresults - 1` slots,
    // which is negative whenever this branch is taken.
    if nresults > nargs + 1 { self.checkstack_(nresults - nargs - 1) }
    self.as_raw().call(nargs, nresults)
}
/// Like `call`, but runs the function in protected mode; `errfunc` is the
/// stack index of an error-handler function, or 0 for none.
///
/// # Errors
/// Returns the `PCallError` reported by `lua_pcall` on failure.
pub unsafe fn pcall(&mut self, nargs: i32, nresults: i32, errfunc: i32)
                    -> Result<(),PCallError> {
    luaassert!(self, nargs >= 0, "pcall: invalid nargs");
    luaassert!(self, nresults == MULTRET || nresults >= 0, "pcall: invalid nresults");
    luaassert!(self, self.gettop() > nargs, "pcall: stack underflow");
    if errfunc != 0 {
        self.check_valid(errfunc, false)
    }
    // The call pops the function plus `nargs` arguments and pushes
    // `nresults` values, so the net stack growth is `nresults - (nargs + 1)`.
    // BUGFIX: the previous code requested `nargs - nresults - 1` slots,
    // which is negative whenever this branch is taken.
    if nresults > nargs + 1 { self.checkstack_(nresults - nargs - 1) }
    self.as_raw().pcall(nargs, nresults, errfunc)
}
// Remaining checked wrappers: chunk loading, coroutine control, GC,
// iteration, and the convenience forms of the basic operations.
pub unsafe fn load(&mut self, reader: Reader, data: *mut libc::c_void, chunkname: &str)
                   -> Result<(),LoadError> {
    // The loaded chunk is pushed as a function: one slot.
    self.checkstack_(1);
    self.as_raw().load(reader, data, chunkname)
}
pub unsafe fn dump(&mut self, writer: Writer, data: *mut libc::c_void) -> Result<(),i32> {
    // Dumps the function at the top of the stack.
    luaassert!(self, self.gettop() >= 1, "dump: stack underflow");
    self.as_raw().dump(writer, data)
}
pub unsafe fn yield_(&mut self, nresults: i32) -> c_int {
    luaassert!(self, self.gettop() >= nresults, "yield: stack underflow");
    self.as_raw().yield_(nresults)
}
pub unsafe fn resume(&mut self, narg: i32) -> Result<bool,PCallError> {
    luaassert!(self, self.gettop() > narg, "resume: stack underflow");
    self.as_raw().resume(narg)
}
pub unsafe fn status(&mut self) -> Result<bool,PCallError> {
    self.as_raw().status()
}
pub unsafe fn gc(&mut self, what: GC, data: i32) -> i32 {
    self.as_raw().gc(what, data)
}
pub unsafe fn error(&mut self) -> ! {
    // The error value must already be on top of the stack.
    luaassert!(self, self.gettop() > 0, "error: stack underflow");
    self.as_raw().error()
}
pub unsafe fn next(&mut self, idx: i32) -> bool {
    self.check_valid(idx, true);
    self.as_raw().next(idx)
}
pub unsafe fn concat(&mut self, n: i32) {
    luaassert!(self, n >= 0, "concat: invalid argument n");
    luaassert!(self, n <= self.gettop(), "concat: stack underflow");
    // Concatenating zero values pushes an empty string: one slot.
    if n == 0 { self.checkstack_(1) }
    self.as_raw().concat(n)
}
pub unsafe fn getallocf(&mut self, ud: *mut *mut libc::c_void) -> Alloc {
    self.as_raw().getallocf(ud)
}
pub unsafe fn setallocf(&mut self, f: Alloc, ud: *mut libc::c_void) {
    self.as_raw().setallocf(f, ud)
}
pub unsafe fn pop(&mut self, n: i32) {
    if n >= 0 {
        luaassert!(self, self.gettop() >= n, "pop: stack underflow");
    } else {
        // NOTE(review): for negative n the pop is relative to the top;
        // (n+1).abs() is the assumed number of slots that must exist —
        // confirm against lua_settop's negative-index semantics.
        luaassert!(self, self.gettop() >= (n+1).abs(), "pop: stack underflow");
    }
    self.as_raw().pop(n)
}
pub unsafe fn newtable(&mut self) {
    self.checkstack_(1);
    self.as_raw().newtable()
}
pub unsafe fn register(&mut self, name: &str, f: CFunction) {
    self.checkstack_(1);
    self.as_raw().register(name, f)
}
pub unsafe fn pushcfunction(&mut self, f: CFunction) {
    self.checkstack_(1);
    self.as_raw().pushcfunction(f)
}
// Type predicates: validate the index, then delegate.
pub unsafe fn isfunction(&mut self, idx: i32) -> bool {
    self.check_acceptable(idx);
    self.as_raw().isfunction(idx)
}
pub unsafe fn istable(&mut self, idx: i32) -> bool {
    self.check_acceptable(idx);
    self.as_raw().istable(idx)
}
pub unsafe fn islightuserdata(&mut self, idx: i32) -> bool {
    self.check_acceptable(idx);
    self.as_raw().islightuserdata(idx)
}
pub unsafe fn isnil(&mut self, idx: i32) -> bool {
    self.check_acceptable(idx);
    self.as_raw().isnil(idx)
}
pub unsafe fn isboolean(&mut self, idx: i32) -> bool {
    self.check_acceptable(idx);
    self.as_raw().isboolean(idx)
}
pub unsafe fn isthread(&mut self, idx: i32) -> bool {
    self.check_acceptable(idx);
    self.as_raw().isthread(idx)
}
pub unsafe fn isnone(&mut self, idx: i32) -> bool {
    self.check_acceptable(idx);
    self.as_raw().isnone(idx)
}
pub unsafe fn isnoneornil(&mut self, idx: i32) -> bool {
    self.check_acceptable(idx);
    self.as_raw().isnoneornil(idx)
}
pub unsafe fn setglobal(&mut self, name: &str) {
    // Consumes the value on top of the stack.
    luaassert!(self, self.gettop() > 0, "setglobal: stack underflow");
    self.as_raw().setglobal(name)
}
pub unsafe fn getglobal(&mut self, name: &str) {
    self.checkstack_(1);
    self.as_raw().getglobal(name)
}
}
#[allow(missing_docs)]
impl<'l> RawState<'l> {
    // Thin, unchecked wrappers over the raw Lua C API. No index or stack
    // validation is performed here; callers (notably ExternState) are
    // responsible for that. All i32 indices are cast to c_int at the FFI
    // boundary — this impl previously omitted the cast in a handful of
    // predicates (islightuserdata .. isnoneornil, tobytes); they now cast
    // consistently (a no-op, since c_int == i32 on supported targets).
    pub unsafe fn newthread(&mut self) -> State {
        #![inline]
        // Wrap the borrowed ExternState handle into an owned State.
        mem::transmute(ExternState::from_lua_State(raw::lua_newthread(self.L)))
    }
    pub unsafe fn atpanic(&mut self, panicf: CFunction) -> CFunction {
        #![inline]
        raw::lua_atpanic(self.L, panicf)
    }
    /// Returns a human-readable description of the value at `idx`.
    pub unsafe fn describe(&mut self, idx: i32) -> String {
        self.describe_(idx, true)
    }
    /// Like `describe`, but `usestack` controls whether a temporary copy of
    /// the value may be pushed while converting numbers to strings.
    pub unsafe fn describe_(&mut self, idx: i32, usestack: bool) -> String {
        match self.type_(idx) {
            None => "".to_string(),
            Some(typ) => match typ {
                Type::Nil => "nil".to_string(),
                Type::Boolean => if self.toboolean(idx) { "true".to_string() }
                                 else { "false".to_string() },
                Type::Number => {
                    // Let Lua create the string instead of us
                    if usestack { self.pushvalue(idx); } // copy the value
                    let s = self.tostring(-1).map(|s| s.to_string());
                    if usestack { self.pop(1); } // remove the copied value
                    s.unwrap_or_default() // default will be ~""
                }
                Type::String => {
                    self.tostring(idx).unwrap_or("<invalid utf8>").to_string()
                }
                Type::LightUserdata |
                Type::Userdata |
                Type::Table |
                Type::Thread |
                Type::Function => {
                    let s = self.typename(idx);
                    let p = self.topointer(idx);
                    format!("<{} {:p}>", s, p)
                }
            }
        }
    }
    pub fn gettop(&mut self) -> i32 {
        #![inline]
        unsafe { raw::lua_gettop(self.L) as i32 }
    }
    pub unsafe fn settop(&mut self, idx: i32) {
        #![inline]
        raw::lua_settop(self.L, idx as c_int)
    }
    pub unsafe fn pushvalue(&mut self, idx: i32) {
        #![inline]
        raw::lua_pushvalue(self.L, idx as c_int)
    }
    pub unsafe fn remove(&mut self, idx: i32) {
        #![inline]
        raw::lua_remove(self.L, idx as c_int)
    }
    pub unsafe fn insert(&mut self, idx: i32) {
        #![inline]
        raw::lua_insert(self.L, idx as c_int)
    }
    pub unsafe fn replace(&mut self, idx: i32) {
        #![inline]
        raw::lua_replace(self.L, idx as c_int)
    }
    /// Grows the stack so it can hold `extra` more values, returning false
    /// if the stack cannot grow. `stackspace` caches the known capacity so
    /// lua_checkstack is only called when the cache says we might overflow.
    pub unsafe fn checkstack(&mut self, extra: i32) -> bool {
        #![inline]
        let top = self.gettop();
        if top + extra > self.stackspace {
            if raw::lua_checkstack(self.L, extra as c_int) != 0 {
                self.stackspace = top + extra;
                true
            } else {
                false
            }
        } else {
            true
        }
    }
    /// Like `checkstack`, but raises a Lua error on failure.
    pub unsafe fn checkstack_(&mut self, extra: i32) {
        #![inline]
        luaassert!(self, self.checkstack(extra), "checkstack: cannot grow stack")
    }
    pub unsafe fn xmove(&mut self, to: &mut RawState, n: i32) {
        #![inline]
        raw::lua_xmove(self.L, to.L, n as c_int)
    }
    pub unsafe fn isnumber(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_isnumber(self.L, idx as c_int) != 0
    }
    pub unsafe fn isstring(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_isstring(self.L, idx as c_int) != 0
    }
    pub unsafe fn iscfunction(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_iscfunction(self.L, idx as c_int) != 0
    }
    pub unsafe fn isuserdata(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_isuserdata(self.L, idx as c_int) != 0
    }
    /// Maps lua_type's result onto `Type`; None means there is no value at
    /// the given index (LUA_TNONE).
    pub unsafe fn type_(&mut self, idx: i32) -> Option<Type> {
        #![inline]
        match raw::lua_type(self.L, idx as c_int) {
            raw::LUA_TNONE => None,
            raw::LUA_TNIL => Some(Type::Nil),
            raw::LUA_TBOOLEAN => Some(Type::Boolean),
            raw::LUA_TLIGHTUSERDATA => Some(Type::LightUserdata),
            raw::LUA_TNUMBER => Some(Type::Number),
            raw::LUA_TSTRING => Some(Type::String),
            raw::LUA_TTABLE => Some(Type::Table),
            raw::LUA_TFUNCTION => Some(Type::Function),
            raw::LUA_TUSERDATA => Some(Type::Userdata),
            raw::LUA_TTHREAD => Some(Type::Thread),
            _ => self.errorstr("type: Unknown return value from lua_type")
        }
    }
    pub unsafe fn typename(&mut self, idx: i32) -> &'static str {
        #![inline]
        // Type names are interned C strings inside Lua, so extending the
        // lifetime to 'static here is how the original models that.
        let s = aux::raw::luaL_typename(self.L, idx as c_int);
        mem::transmute::<&str, &'static str>(str::from_utf8(ffi::c_str_to_bytes(&s)).unwrap())
    }
    pub unsafe fn equal(&mut self, index1: i32, index2: i32) -> bool {
        #![inline]
        raw::lua_equal(self.L, index1 as c_int, index2 as c_int) != 0
    }
    pub unsafe fn rawequal(&mut self, index1: i32, index2: i32) -> bool {
        #![inline]
        raw::lua_rawequal(self.L, index1 as c_int, index2 as c_int) != 0
    }
    pub unsafe fn lessthan(&mut self, index1: i32, index2: i32) -> bool {
        #![inline]
        raw::lua_lessthan(self.L, index1 as c_int, index2 as c_int) != 0
    }
    pub unsafe fn tonumber(&mut self, idx: i32) -> f64 {
        #![inline]
        raw::lua_tonumber(self.L, idx as c_int) as f64
    }
    pub unsafe fn tointeger(&mut self, idx: i32) -> isize {
        #![inline]
        raw::lua_tointeger(self.L, idx as c_int) as isize
    }
    pub unsafe fn toboolean(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_toboolean(self.L, idx as c_int) != 0
    }
    /// Note: the string is returned as 'static to prevent borrowing the
    /// RawState, but its lifetime is actually that of the value on the stack.
    // TODO: change return type to use core::str::Utf8Error
    pub unsafe fn tostring(&mut self, idx: i32) -> Option<&'static str> {
        #![inline]
        self.tobytes(idx).and_then(|v| str::from_utf8(v).ok())
    }
    /// Note: the byte vector is returned as 'static to prevent borrowing the
    /// RawState, but its lifetime is actually that of the value on the stack.
    pub unsafe fn tobytes(&mut self, idx: i32) -> Option<&'static [u8]> {
        #![inline]
        let mut sz: libc::size_t = 0;
        let s = raw::lua_tolstring(self.L, idx as c_int, &mut sz);
        if s.is_null() {
            None
        } else {
            let buf = s as *const u8;
            Some(slice::from_raw_parts(buf, sz as usize))
        }
    }
    pub unsafe fn objlen(&mut self, idx: i32) -> usize {
        #![inline]
        raw::lua_objlen(self.L, idx as c_int) as usize
    }
    pub unsafe fn tocfunction(&mut self, idx: i32) -> Option<CFunction> {
        #![inline]
        raw::lua_tocfunction(self.L, idx as c_int)
    }
    pub unsafe fn touserdata(&mut self, idx: i32) -> *mut libc::c_void {
        #![inline]
        raw::lua_touserdata(self.L, idx as c_int)
    }
    pub unsafe fn tothread(&mut self, idx: i32) -> Option<ExternState> {
        #![inline]
        let s = raw::lua_tothread(self.L, idx as c_int);
        if s.is_null() {
            None
        } else {
            // stackspace 0 forces the first checkstack on the new handle to
            // consult lua_checkstack.
            Some(ExternState { L: s, stackspace: 0, _marker: marker::PhantomData })
        }
    }
    pub unsafe fn topointer(&mut self, idx: i32) -> *const libc::c_void {
        #![inline]
        raw::lua_topointer(self.L, idx as c_int)
    }
    pub unsafe fn pushnil(&mut self) {
        #![inline]
        raw::lua_pushnil(self.L)
    }
    pub unsafe fn pushnumber(&mut self, n: f64) {
        #![inline]
        raw::lua_pushnumber(self.L, n as raw::lua_Number)
    }
    pub unsafe fn pushinteger(&mut self, n: isize) {
        #![inline]
        raw::lua_pushinteger(self.L, n as raw::lua_Integer)
    }
    pub unsafe fn pushstring(&mut self, s: &str) {
        #![inline]
        // lua_pushlstring takes an explicit length, so no NUL terminator is
        // required and interior NULs are preserved.
        raw::lua_pushlstring(self.L, s.as_ptr() as *const libc::c_char, s.len() as libc::size_t)
    }
    pub unsafe fn pushbytes(&mut self, bytes: &[u8]) {
        #![inline]
        raw::lua_pushlstring(self.L, bytes.as_ptr() as *const libc::c_char, bytes.len() as libc::size_t)
    }
    pub unsafe fn pushcclosure(&mut self, f: CFunction, n: i32) {
        #![inline]
        raw::lua_pushcclosure(self.L, f, n as c_int)
    }
    pub unsafe fn pushboolean(&mut self, b: bool) {
        #![inline]
        raw::lua_pushboolean(self.L, b as c_int)
    }
    pub unsafe fn pushlightuserdata(&mut self, p: *mut libc::c_void) {
        #![inline]
        raw::lua_pushlightuserdata(self.L, p)
    }
    pub unsafe fn pushthread(&mut self) -> bool {
        #![inline]
        raw::lua_pushthread(self.L) != 0
    }
    pub unsafe fn gettable(&mut self, idx: i32) {
        #![inline]
        raw::lua_gettable(self.L, idx as c_int)
    }
    pub unsafe fn getfield(&mut self, idx: i32, k: &str) {
        #![inline]
        // lua_getfield needs a NUL-terminated key, hence the CString copy.
        raw::lua_getfield(self.L, idx as c_int, CString::from_slice(k.as_bytes()).as_ptr())
    }
    pub unsafe fn rawget(&mut self, idx: i32) {
        #![inline]
        raw::lua_rawget(self.L, idx as c_int)
    }
    pub unsafe fn rawgeti(&mut self, idx: i32, n: i32) {
        #![inline]
        raw::lua_rawgeti(self.L, idx as c_int, n as c_int)
    }
    pub unsafe fn createtable(&mut self, narr: i32, nrec: i32) {
        #![inline]
        raw::lua_createtable(self.L, narr as c_int, nrec as c_int)
    }
    pub unsafe fn newuserdata(&mut self, size: usize) -> *mut libc::c_void {
        #![inline]
        raw::lua_newuserdata(self.L, size as libc::size_t)
    }
    pub unsafe fn getmetatable(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_getmetatable(self.L, idx as c_int) != 0
    }
    pub unsafe fn getfenv(&mut self, idx: i32) {
        #![inline]
        raw::lua_getfenv(self.L, idx as c_int)
    }
    pub unsafe fn settable(&mut self, idx: i32) {
        #![inline]
        raw::lua_settable(self.L, idx as c_int)
    }
    pub unsafe fn setfield(&mut self, idx: i32, k: &str) {
        #![inline]
        raw::lua_setfield(self.L, idx as c_int, CString::from_slice(k.as_bytes()).as_ptr())
    }
    pub unsafe fn rawset(&mut self, idx: i32) {
        #![inline]
        raw::lua_rawset(self.L, idx as c_int)
    }
    pub unsafe fn rawseti(&mut self, idx: i32, n: i32) {
        #![inline]
        raw::lua_rawseti(self.L, idx as c_int, n as c_int)
    }
    pub unsafe fn setmetatable(&mut self, idx: i32) {
        #![inline]
        // ignore return value of lua_setmetatable(), it appears to always be 1
        raw::lua_setmetatable(self.L, idx as c_int);
    }
    pub unsafe fn setfenv(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_setfenv(self.L, idx as c_int) != 0
    }
    pub unsafe fn call(&mut self, nargs: i32, nresults: i32) {
        #![inline]
        raw::lua_call(self.L, nargs as c_int, nresults as c_int)
    }
    pub unsafe fn pcall(&mut self, nargs: i32, nresults: i32, errfunc: i32)
                        -> Result<(),PCallError> {
        match raw::lua_pcall(self.L, nargs as c_int, nresults as c_int, errfunc as c_int) {
            0 => Ok(()),
            i => Err(PCallError::from_code(i).unwrap_or_else(|| {
                self.errorstr("pcall: unexpected error from lua_pcall")
            }))
        }
    }
    pub unsafe fn load(&mut self, reader: Reader, data: *mut libc::c_void, chunkname: &str)
                       -> Result<(),LoadError> {
        // Keep the CString alive for the duration of the lua_load call.
        let cstr = CString::from_slice(chunkname.as_bytes());
        match raw::lua_load(self.L, reader, data, cstr.as_ptr()) {
            0 => Ok(()),
            raw::LUA_ERRSYNTAX => Err(LoadError::ErrSyntax),
            raw::LUA_ERRMEM => Err(LoadError::ErrMem),
            _ => self.errorstr("load: unexpected error from lua_load")
        }
    }
    pub unsafe fn dump(&mut self, writer: Writer, data: *mut libc::c_void) -> Result<(),i32> {
        #![inline]
        match raw::lua_dump(self.L, writer, data) {
            0 => Ok(()),
            i => Err(i)
        }
    }
    pub unsafe fn yield_(&mut self, nresults: i32) -> c_int {
        #![inline]
        raw::lua_yield(self.L, nresults as c_int)
    }
    /// Ok(false) means the coroutine yielded; Ok(true) means it finished.
    pub unsafe fn resume(&mut self, narg: i32) -> Result<bool,PCallError> {
        #![inline]
        match raw::lua_resume(self.L, narg as c_int) {
            raw::LUA_YIELD => Ok(false),
            0 => Ok(true),
            i => Err(PCallError::from_code(i).unwrap_or_else(|| {
                self.errorstr("resume: unexpected error from lua_resume")
            }))
        }
    }
    pub unsafe fn status(&mut self) -> Result<bool,PCallError> {
        #![inline]
        match raw::lua_status(self.L) {
            raw::LUA_YIELD => Ok(false),
            0 => Ok(true),
            i => Err(PCallError::from_code(i).unwrap_or_else(|| {
                self.errorstr("status: unexpected error from lua_status")
            }))
        }
    }
    pub unsafe fn gc(&mut self, what: GC, data: i32) -> i32 {
        #![inline]
        raw::lua_gc(self.L, what as c_int, data as c_int) as i32
    }
    pub unsafe fn error(&mut self) -> ! {
        #![inline]
        // lua_error longjmps and never returns; unreachable!() satisfies the
        // `!` return type.
        raw::lua_error(self.L);
        unreachable!()
    }
    pub unsafe fn next(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_next(self.L, idx as c_int) != 0
    }
    pub unsafe fn concat(&mut self, n: i32) {
        #![inline]
        raw::lua_concat(self.L, n as c_int)
    }
    pub unsafe fn getallocf(&mut self, ud: *mut *mut libc::c_void) -> Alloc {
        #![inline]
        raw::lua_getallocf(self.L, ud)
    }
    pub unsafe fn setallocf(&mut self, f: Alloc, ud: *mut libc::c_void) {
        #![inline]
        raw::lua_setallocf(self.L, f, ud)
    }
    pub unsafe fn pop(&mut self, n: i32) {
        #![inline]
        raw::lua_pop(self.L, n as c_int)
    }
    pub unsafe fn newtable(&mut self) {
        #![inline]
        raw::lua_newtable(self.L)
    }
    pub unsafe fn register(&mut self, name: &str, f: CFunction) {
        #![inline]
        raw::lua_register(self.L, CString::from_slice(name.as_bytes()).as_ptr(), f)
    }
    pub unsafe fn pushcfunction(&mut self, f: CFunction) {
        #![inline]
        raw::lua_pushcfunction(self.L, f)
    }
    pub unsafe fn isfunction(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_isfunction(self.L, idx as c_int)
    }
    pub unsafe fn istable(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_istable(self.L, idx as c_int)
    }
    pub unsafe fn islightuserdata(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_islightuserdata(self.L, idx as c_int)
    }
    pub unsafe fn isnil(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_isnil(self.L, idx as c_int)
    }
    pub unsafe fn isboolean(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_isboolean(self.L, idx as c_int)
    }
    pub unsafe fn isthread(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_isthread(self.L, idx as c_int)
    }
    pub unsafe fn isnone(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_isnone(self.L, idx as c_int)
    }
    pub unsafe fn isnoneornil(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_isnoneornil(self.L, idx as c_int)
    }
    pub unsafe fn setglobal(&mut self, name: &str) {
        #![inline]
        raw::lua_setglobal(self.L, CString::from_slice(name.as_bytes()).as_ptr())
    }
    pub unsafe fn getglobal(&mut self, name: &str) {
        #![inline]
        raw::lua_getglobal(self.L, CString::from_slice(name.as_bytes()).as_ptr())
    }
}
/// Name of the coroutine library (`LUA_COLIBNAME`).
pub const COLIBNAME: &'static str = lib::raw::LUA_COLIBNAME;
/// Name of the table library (`LUA_TABLIBNAME`).
pub const TABLIBNAME: &'static str = lib::raw::LUA_TABLIBNAME;
/// Name of the io library (`LUA_IOLIBNAME`).
pub const IOLIBNAME: &'static str = lib::raw::LUA_IOLIBNAME;
/// Name of the os library (`LUA_OSLIBNAME`).
pub const OSLIBNAME: &'static str = lib::raw::LUA_OSLIBNAME;
/// Name of the string library (`LUA_STRLIBNAME`).
pub const STRLIBNAME: &'static str = lib::raw::LUA_STRLIBNAME;
/// Name of the math library (`LUA_MATHLIBNAME`).
pub const MATHLIBNAME: &'static str = lib::raw::LUA_MATHLIBNAME;
/// Name of the debug library (`LUA_DBLIBNAME`).
pub const DBLIBNAME: &'static str = lib::raw::LUA_DBLIBNAME;
/// Name of the package library (`LUA_LOADLIBNAME`).
pub const LOADLIBNAME: &'static str = lib::raw::LUA_LOADLIBNAME;
// Functions from lualib
impl State {
    /// Opens the basic library.
    #[inline(always)]
    pub fn open_base(&mut self) {
        unsafe { self.as_extern().open_base() }
    }
    /// Opens the table library.
    #[inline(always)]
    pub fn open_table(&mut self) {
        unsafe { self.as_extern().open_table() }
    }
    /// Opens the io library.
    #[inline(always)]
    pub fn open_io(&mut self) {
        unsafe { self.as_extern().open_io() }
    }
    /// Opens the os library.
    #[inline(always)]
    pub fn open_os(&mut self) {
        unsafe { self.as_extern().open_os() }
    }
    /// Opens the string library.
    #[inline(always)]
    pub fn open_string(&mut self) {
        unsafe { self.as_extern().open_string() }
    }
    /// Opens the math library.
    #[inline(always)]
    pub fn open_math(&mut self) {
        unsafe { self.as_extern().open_math() }
    }
    /// Opens the debug library.
    #[inline(always)]
    pub fn open_debug(&mut self) {
        unsafe { self.as_extern().open_debug() }
    }
    /// Opens the package library.
    #[inline(always)]
    pub fn open_package(&mut self) {
        unsafe { self.as_extern().open_package() }
    }
    /// Opens all standard Lua libraries.
    #[inline(always)]
    pub fn openlibs(&mut self) {
        unsafe { self.as_extern().openlibs() }
    }
}
#[allow(missing_docs)]
impl<'l> ExternState<'l> {
    // Each opener reserves two stack slots up front: the corresponding
    // RawState method pushes the library's C function plus its name
    // argument before calling it.
    pub unsafe fn open_base(&mut self) {
        self.checkstack_(2);
        self.as_raw().open_base()
    }
    pub unsafe fn open_table(&mut self) {
        self.checkstack_(2);
        self.as_raw().open_table()
    }
    pub unsafe fn open_io(&mut self) {
        self.checkstack_(2);
        self.as_raw().open_io()
    }
    pub unsafe fn open_os(&mut self) {
        self.checkstack_(2);
        self.as_raw().open_os()
    }
    pub unsafe fn open_string(&mut self) {
        self.checkstack_(2);
        self.as_raw().open_string()
    }
    pub unsafe fn open_math(&mut self) {
        self.checkstack_(2);
        self.as_raw().open_math()
    }
    pub unsafe fn open_debug(&mut self) {
        self.checkstack_(2);
        self.as_raw().open_debug()
    }
    pub unsafe fn open_package(&mut self) {
        self.checkstack_(2);
        self.as_raw().open_package()
    }
    pub unsafe fn openlibs(&mut self) {
        self.checkstack_(2);
        self.as_raw().openlibs()
    }
}
#[allow(missing_docs)]
impl<'l> RawState<'l> {
    // Each opener pushes the library's luaopen_* C function and the
    // library name, then calls it with one argument and zero results.
    pub unsafe fn open_base(&mut self) {
        #![inline]
        self.pushcfunction(lib::raw::luaopen_base);
        // The base library is invoked with the empty string rather than a
        // library name.
        self.pushstring("");
        self.call(1, 0);
    }
    pub unsafe fn open_table(&mut self) {
        #![inline]
        self.pushcfunction(lib::raw::luaopen_table);
        self.pushstring(TABLIBNAME);
        self.call(1, 0);
    }
    pub unsafe fn open_io(&mut self) {
        #![inline]
        self.pushcfunction(lib::raw::luaopen_io);
        self.pushstring(IOLIBNAME);
        self.call(1, 0);
    }
    pub unsafe fn open_os(&mut self) {
        #![inline]
        self.pushcfunction(lib::raw::luaopen_os);
        self.pushstring(OSLIBNAME);
        self.call(1, 0);
    }
    pub unsafe fn open_string(&mut self) {
        #![inline]
        self.pushcfunction(lib::raw::luaopen_string);
        self.pushstring(STRLIBNAME);
        self.call(1, 0);
    }
    pub unsafe fn open_math(&mut self) {
        #![inline]
        self.pushcfunction(lib::raw::luaopen_math);
        self.pushstring(MATHLIBNAME);
        self.call(1, 0);
    }
    pub unsafe fn open_debug(&mut self) {
        #![inline]
        self.pushcfunction(lib::raw::luaopen_debug);
        self.pushstring(DBLIBNAME);
        self.call(1, 0);
    }
    pub unsafe fn open_package(&mut self) {
        #![inline]
        self.pushcfunction(lib::raw::luaopen_package);
        self.pushstring(LOADLIBNAME);
        self.call(1, 0);
    }
    pub unsafe fn openlibs(&mut self) {
        #![inline]
        // luaL_openlibs opens every standard library in a single call.
        lib::raw::luaL_openlibs(self.L)
    }
}
/// Sentinel guaranteed to be different from any reference returned by
/// `ref_()` (see `unref()`; `unref()` does nothing when given this value).
pub const NOREF: i32 = aux::raw::LUA_NOREF as i32;
/// Sentinel returned by `ref_()` when the object at the top of the stack
/// is nil.
pub const REFNIL: i32 = aux::raw::LUA_REFNIL as i32;
// Functions from auxlib
impl State {
/// Opens a library.
///
/// When called with `libname` equal to None, it simply registers all
/// functions in the list `l` into the table on the top of the stack.
///
/// When called with a `libname` of Some(_), registerlib() creates a new
/// table `t`, sets it as the value of the global variable `libname`, sets
/// it as the value of `package.loaded[libname]`, and registers on it all
/// functions in the list `l`. If there is a table in
/// `package.loaded[libname]` or in variable `libname`, reuses this table
/// instead of creating a new one.
///
/// In any case the function leaves the table on the top of the stack.
pub fn registerlib(&mut self, libname: Option<&str>, l: &[(&str,CFunction)]) {
#![inline(always)]
unsafe { self.as_extern().registerlib(libname, l) }
}
/// Pushes onto the stack the field `e` from the metatable of the object at
/// index `obj`. If the object does not have a metatable, or if the
/// metatable does not have this field, returns `false` and pushes nothing.
pub fn getmetafield(&mut self, obj: i32, e: &str) -> bool {
#![inline(always)]
unsafe { self.as_extern().getmetafield(obj, e) }
}
/// Calls a metamethod.
///
/// If the object at index `obj` has a metatable and this metatable has a
/// field `e`, this method calls this field and passes the object as its
/// only argument. In this case this method returns `true` and pushes onto
/// the stack the value returned by the call. If there is no metatable or
/// no metamethod, this method returns `false` (without pushing any value
/// on the stack).
pub fn callmeta(&mut self, obj: i32, e: &str) -> bool {
#![inline(always)]
unsafe { self.as_extern().callmeta(obj, e) }
}
/// Generates an error with a message like the following:
///
/// <location>: bad argument <narg> to '<func>' (<tname> expected, got <rt>)
///
/// where `location` is produced by where(), `func` is the name of the
/// current function, and `rt` is the type name of the actual argument.
pub fn typerror(&mut self, narg: i32, tname: &str) -> ! {
#![inline(always)]
unsafe { self.as_extern().typerror(narg, tname) }
}
/// Raises an error with the following message, where `func` is taken from
/// the call stack:
///
/// bad argument #<narg> to <func> (<extramsg>)
pub fn argerror(&mut self, narg: i32, extramsg: &str) -> ! {
#![inline(always)]
unsafe { self.as_extern().argerror(narg, extramsg) }
}
/// Checks whether the function argument `narg` is a string, and returns
/// the string. This function uses lua_tolstring to get its result, so all
/// conversions and caveats of that function apply here.
///
/// If the string is not utf-8, returns None.
pub fn checkstring<'a>(&'a mut self, narg: i32) -> Option<&'a str> {
#![inline(always)]
unsafe { mem::transmute(self.as_extern().checkstring(narg)) }
}
/// Checks whether the function argument `narg` is a lua string, and
/// returns it as a byte vector. See checkstring() for caveats.
pub fn checkbytes<'a>(&'a mut self, narg: i32) -> &'a [u8] {
#![inline(always)]
unsafe { mem::transmute(self.as_extern().checkbytes(narg)) }
}
/// If the function argument `narg` is a string, returns this string. If
/// this argument is absent or is nil, returns `d`. Otherwise, raises an
/// error.
///
/// If the argument is a string, but is not utf-8, returns None.
pub fn optstring<'a>(&'a mut self, narg: i32, d: &'a str) -> Option<&'a str> {
#![inline(always)]
unsafe {
let d = mem::transmute::<&'a str, &'static str>(d);
mem::transmute(self.as_extern().optstring(narg, d))
}
}
/// If the function argument `narg` is a lua string, returns this string
/// as a byte vector. See optstring() for more information.
pub fn optbytes<'a>(&'a mut self, narg: i32, d: &'a [u8]) -> &'a [u8] {
#![inline(always)]
unsafe {
let d = mem::transmute::<&'a [u8], &'static [u8]>(d);
mem::transmute(self.as_extern().optbytes(narg, d))
}
}
/// Checks whether the function argument `narg` is a number and returns the
/// number.
pub fn checknumber(&mut self, narg: i32) -> f64 {
#![inline(always)]
unsafe { self.as_extern().checknumber(narg) }
}
/// If the function argument `narg` is a number, returns this number. If
/// the argument is absent or is nil, returns `d`. Otherwise, throws an
/// error.
pub fn optnumber(&mut self, narg: i32, d: f64) -> f64 {
#![inline(always)]
unsafe { self.as_extern().optnumber(narg, d) }
}
/// Checks whether the function argument `narg` is a number and returns it
/// as an isize.
pub fn checkinteger(&mut self, narg: i32) -> isize {
#![inline(always)]
unsafe { self.as_extern().checkinteger(narg) }
}
/// If the function argument `narg` is a number, returns this number cast
/// to an isize. If this argument is absent or nil, returns `d`. Otherwise,
/// raises an error.
pub fn optinteger(&mut self, narg: i32, d: isize) -> isize {
#![inline(always)]
unsafe { self.as_extern().optinteger(narg, d) }
}
/// Checks whether the function argument `narg` has type `t`.
pub fn checktype(&mut self, narg: i32, t: Type) {
#![inline(always)]
unsafe { self.as_extern().checktype(narg, t) }
}
/// Checks whether the function has an argument of any type (including nil)
/// at position `narg`.
pub fn checkany(&mut self, narg: i32) {
#![inline(always)]
unsafe { self.as_extern().checkany(narg) }
}
/// If the registry already has the key `tname`, returns `false`.
/// Otherwise, creates a new table to be used as a metatable for userdata,
/// adds it to the registry with key `tname`, and returns `true`.
///
/// In both cases pushes onto the stack the final value associated with
/// `tname` in the registry.
pub fn newmetatable(&mut self, tname: &str) -> bool {
#![inline(always)]
unsafe { self.as_extern().newmetatable(tname) }
}
/// Checks whether the function argument `narg` is a userdata of the type
/// `tname` (see newmetatable()). The userdata pointer is returned.
pub fn checkudata(&mut self, narg: i32, tname: &str) -> *mut libc::c_void {
#![inline(always)]
unsafe { self.as_extern().checkudata(narg, tname) }
}
/// Pushes onto the stack a string identifying the current position of the
/// control at level `lvl` in the call stack.
/// Level 0 is the running function, level 1 is the function that called
/// the running function, etc.
pub fn where_(&mut self, lvl: i32) {
#![inline(always)]
unsafe { self.as_extern().where_(lvl) }
}
/// Raises an error with the given string.
/// It also adds at the beginning of the message the file name and line
/// number where the error occurred, if this information is available.
pub fn errorstr(&mut self, s: &str) -> ! {
#![inline(always)]
unsafe { self.as_extern().errorstr(s) }
}
/// Checks whether the function arg `narg` is a string and searches for
/// this string in `lst`. The first element of each tuple is compared
/// against, and if a match is found, the second element is returned.
/// Raises an error if the argument is not a string or the string cannot be
/// found.
///
/// If `def` is not None, the function uses `def` as a default value when
/// there is no argument `narg` or this argument is nil.
///
/// Fails the task if `def` or any list key has interior NULs
pub fn checkoption<'a, T>(&mut self, narg: i32, def: Option<&str>, lst: &'a [(&str,T)])
-> &'a T {
#![inline(always)]
unsafe { self.as_extern().checkoption(narg, def, lst) }
}
/// Pops the value on top of the stack and creates a reference to it in
/// the table at index `t`, returning the new reference.
///
/// References are unique integer keys: as long as no integer keys are
/// inserted into table `t` by hand, ref_() guarantees uniqueness. The
/// referenced value can be fetched with `L.rawget(t, r)` and released
/// (together with its reference) via unref().
///
/// When the value on top of the stack is nil, ref_() returns the constant
/// RefNil. The constant NoRef never collides with any value ref_()
/// produces.
#[inline(always)]
pub fn ref_(&mut self, t: i32) -> i32 {
    unsafe { self.as_extern().ref_(t) }
}
/// Frees reference `r` in the table at index `t` (see ref_()). The table
/// entry is removed so the referenced value may be collected, and `r`
/// becomes available for reuse.
///
/// Passing NoRef or RefNil is a no-op.
#[inline(always)]
pub fn unref(&mut self, t: i32, r: i32) {
    unsafe { self.as_extern().unref(t, r) }
}
/// Loads (without running) a file as a Lua chunk, reading from standard
/// input when `filename` is None.
/// Fails the task if `filename` contains interior NULs.
#[inline(always)]
pub fn loadfile(&mut self, filename: Option<&path::Path>) -> Result<(),LoadFileError> {
    unsafe { self.as_extern().loadfile(filename) }
}
/// Loads (without running) a buffer as a Lua chunk. From the Rust side
/// this differs from loadstring() in that the chunk gets a caller-chosen
/// name, and the buffer may contain NUL bytes (though Lua itself is
/// unlikely to accept those).
/// Fails the task if `name` contains interior NULs.
#[inline(always)]
pub fn loadbuffer(&mut self, buf: &str, name: &str) -> Result<(),LoadError> {
    unsafe { self.as_extern().loadbuffer(buf, name) }
}
/// Loads (without running) a string as a Lua chunk.
/// Fails the task if `s` contains interior NULs.
#[inline(always)]
pub fn loadstring(&mut self, s: &str) -> Result<(),LoadError> {
    unsafe { self.as_extern().loadstring(s) }
}
/// Builds a copy of `s` in which every occurrence of `p` is replaced by
/// `r`, pushes the result onto the stack, and returns it.
#[inline(always)]
pub fn gsub<'a>(&'a mut self, s: &str, p: &str, r: &str) -> &'a str {
    // Narrow the 'static lifetime from the extern call down to 'a, the
    // borrow of this State.
    unsafe { mem::transmute(self.as_extern().gsub(s, p, r)) }
}
/// Verifies that `cond` holds; when it does not, an error is raised with
/// the message below, where `func` is taken from the call stack:
///
///   bad argument #<narg> to <func> (<extramsg>)
///
/// Fails the task if `extramsg` contains interior NULs.
#[inline(always)]
pub fn argcheck(&mut self, cond: bool, narg: i32, extramsg: &str) {
    unsafe { self.as_extern().argcheck(cond, narg, extramsg) }
}
/// Loads the given file and executes it, returning `true` on success and
/// `false` when any error occurred.
#[inline(always)]
pub fn dofile(&mut self, filename: Option<&path::Path>) -> bool {
    unsafe { self.as_extern().dofile(filename) }
}
/// Loads the given string and executes it, returning `true` on success
/// and `false` when any error occurred.
#[inline(always)]
pub fn dostring(&mut self, s: &str) -> bool {
    unsafe { self.as_extern().dostring(s) }
}
/// Pushes the metatable registered under `tname` in the registry (see
/// newmetatable()) onto the stack.
#[inline(always)]
pub fn getmetatable_reg(&mut self, tname: &str) {
    unsafe { self.as_extern().getmetatable_reg(tname) }
}
/// Creates a freshly initialized string Buffer tied to this State.
#[inline(always)]
pub fn buffinit<'a>(&'a mut self) -> Buffer<'a> {
    self.as_extern().buffinit()
}
}
#[allow(missing_docs)]
// Aux-library (luaL_*) wrappers. Each method pre-validates the indices it
// receives and reserves the stack slots the underlying C routine is known
// to use, then delegates to the matching RawState method.
impl<'l> ExternState<'l> {
pub unsafe fn registerlib(&mut self, libname: Option<&str>, l: &[(&str,CFunction)]) {
// internally, luaL_registerlib seems to use 4 stack slots
self.checkstack_(4);
if libname.is_none() {
// with no library name, luaL_register reads the target table from the
// top of the stack, so there must be at least one value present
luaassert!(self, self.gettop() >= 1, "registerlib: stack underflow");
}
self.as_raw().registerlib(libname, l)
}
pub unsafe fn getmetafield(&mut self, obj: i32, e: &str) -> bool {
self.check_acceptable(obj);
self.checkstack_(2); // internally, luaL_getmetafield uses 2 stack slots
self.as_raw().getmetafield(obj, e)
}
pub unsafe fn callmeta(&mut self, obj: i32, e: &str) -> bool {
self.check_acceptable(obj);
self.checkstack_(2); // internally, luaL_callmeta uses 2 stack slots
self.as_raw().callmeta(obj, e)
}
pub unsafe fn typerror(&mut self, narg: i32, tname: &str) -> ! {
self.check_acceptable(narg);
// NB: stack checking is not necessary
self.as_raw().typerror(narg, tname)
}
pub unsafe fn argerror(&mut self, narg: i32, extramsg: &str) -> ! {
// NB: stack checking is not necessary
self.as_raw().argerror(narg, extramsg)
}
/// Note: the string is returned as 'static to prevent borrowing the
/// RawState, but its lifetime is actually that of the value on the stack.
pub unsafe fn checkstring(&mut self, narg: i32) -> Option<&'static str> {
self.check_acceptable(narg);
self.as_raw().checkstring(narg)
}
/// Note: the byte vector is returned as 'static to prevent borrowing the
/// RawState, but its lifetime is actually that of the value on the stack.
pub unsafe fn checkbytes(&mut self, narg: i32) -> &'static [u8] {
self.check_acceptable(narg);
self.as_raw().checkbytes(narg)
}
/// Note: the string is returned as 'static to prevent borrowing the
/// RawState, but its lifetime is actually that of the value on the stack.
pub unsafe fn optstring(&mut self, narg: i32, d: &'static str) -> Option<&'static str> {
self.check_acceptable(narg);
self.as_raw().optstring(narg, d)
}
/// Note: the byte vector is returned as 'static to prevent borrowing the
/// RawState, but its lifetime is actually that of the value on the stack.
pub unsafe fn optbytes(&mut self, narg: i32, d: &'static [u8]) -> &'static [u8] {
self.check_acceptable(narg);
self.as_raw().optbytes(narg, d)
}
pub unsafe fn checknumber(&mut self, narg: i32) -> f64 {
self.check_acceptable(narg);
self.as_raw().checknumber(narg)
}
pub unsafe fn optnumber(&mut self, narg: i32, d: f64) -> f64 {
self.check_acceptable(narg);
self.as_raw().optnumber(narg, d)
}
pub unsafe fn checkinteger(&mut self, narg: i32) -> isize {
self.check_acceptable(narg);
self.as_raw().checkinteger(narg)
}
pub unsafe fn optinteger(&mut self, narg: i32, d: isize) -> isize {
self.check_acceptable(narg);
self.as_raw().optinteger(narg, d)
}
pub unsafe fn checktype(&mut self, narg: i32, t: Type) {
self.check_acceptable(narg);
self.as_raw().checktype(narg, t)
}
pub unsafe fn checkany(&mut self, narg: i32) {
self.check_acceptable(narg);
self.as_raw().checkany(narg)
}
pub unsafe fn newmetatable(&mut self, tname: &str) -> bool {
self.checkstack_(2); // uses 1 or 2 stack slots internally
self.as_raw().newmetatable(tname)
}
pub unsafe fn checkudata(&mut self, narg: i32, tname: &str) -> *mut libc::c_void {
self.check_acceptable(narg);
self.checkstack_(2); // uses 2 stack slots internally
self.as_raw().checkudata(narg, tname)
}
pub unsafe fn where_(&mut self, lvl: i32) {
// luaL_where() internally uses lua_pushfstring(), which manages stack size itself
// so we don't need to call checkstack()
self.as_raw().where_(lvl)
}
pub unsafe fn errorstr(&mut self, s: &str) -> ! {
// errorstr pushes the location string and the message (2 slots)
self.checkstack_(2);
self.as_raw().errorstr(s)
}
pub unsafe fn checkoption<'a, T>(&mut self, narg: i32, def: Option<&str>, lst: &'a [(&str,T)])
-> &'a T {
self.check_acceptable(narg);
self.as_raw().checkoption(narg, def, lst)
}
pub unsafe fn ref_(&mut self, t: i32) -> i32 {
self.check_valid(t, true);
self.checkstack_(1); // luaL_ref internally uses 1 stack slot
self.as_raw().ref_(t)
}
pub unsafe fn unref(&mut self, t: i32, r: i32) {
// NOTE(review): ref_() validates `t` with check_valid(), but unref() only
// uses check_acceptable() — confirm the asymmetry is intentional.
self.check_acceptable(t);
self.checkstack_(1); // luaL_unref internally uses 1 stack slot
self.as_raw().unref(t, r)
}
pub unsafe fn loadfile(&mut self, filename: Option<&path::Path>) -> Result<(),LoadFileError> {
self.checkstack_(1);
self.as_raw().loadfile(filename)
}
pub unsafe fn loadbuffer(&mut self, buf: &str, name: &str) -> Result<(),LoadError> {
self.checkstack_(1);
self.as_raw().loadbuffer(buf, name)
}
pub unsafe fn loadstring(&mut self, s: &str) -> Result<(),LoadError> {
self.checkstack_(1);
self.as_raw().loadstring(s)
}
/// Note: the string is returned as 'static to prevent borrowing the
/// RawState, but its lifetime is actually that of the value on the stack.
pub unsafe fn gsub(&mut self, s: &str, p: &str, r: &str) -> &'static str {
// presumably a conservative reservation for luaL_gsub's intermediate
// buffer pushes — TODO confirm against the Lua source
self.checkstack_(MINSTACK/2);
self.as_raw().gsub(s, p, r)
}
pub unsafe fn argcheck(&mut self, cond: bool, narg: i32, extramsg: &str) {
// NB: stack checking is not necessary
self.as_raw().argcheck(cond, narg, extramsg)
}
pub unsafe fn dofile(&mut self, filename: Option<&path::Path>) -> bool {
self.checkstack_(1);
self.as_raw().dofile(filename)
}
pub unsafe fn dostring(&mut self, s: &str) -> bool {
self.checkstack_(1);
self.as_raw().dostring(s)
}
pub unsafe fn getmetatable_reg(&mut self, tname: &str) {
self.as_raw().getmetatable_reg(tname)
}
pub fn buffinit<'a: 'l>(&'a mut self) -> Buffer<'a> {
#![inline]
// start from a zeroed luaL_Buffer; luaL_buffinit below sets p and lvl
// to their real initial values
let mut B = aux::raw::luaL_Buffer{
p: ptr::null_mut(),
lvl: 0,
L: self.L,
buffer: [0; aux::raw::LUAL_BUFFERSIZE as usize]
};
unsafe { aux::raw::luaL_buffinit(self.L, &mut B); }
Buffer{ B: B, L: self }
}
}
#[allow(missing_docs)]
// Raw luaL_* translations. This layer only converts between Rust and C
// types; index validation and stack reservation live in ExternState.
impl<'l> RawState<'l> {
    /// Raw wrapper for luaL_register.
    pub unsafe fn registerlib(&mut self, libname: Option<&str>, l: &[(&str,CFunction)]) {
        #![inline]
        let mut cstrs = Vec::with_capacity(l.len());
        let mut l_ = Vec::with_capacity(l.len()+1);
        for &(name, func) in l.iter() {
            let cstr = CString::from_slice(name.as_bytes());
            l_.push(aux::raw::luaL_Reg{ name: cstr.as_ptr(), func: Some(func) });
            // keep each CString alive until after luaL_register has run
            cstrs.push(cstr);
        }
        l_.push(aux::raw::luaL_Reg{ name: ptr::null(), func: None });
        let libcstr = libname.map(|s| CString::from_slice(s.as_bytes()));
        // Borrow the CString rather than consuming it: `libcstr.map_or(..)`
        // would move and drop the CString before the call below, leaving
        // `libname_` a dangling pointer.
        let libname_ = libcstr.as_ref().map_or(ptr::null(), |cstr| cstr.as_ptr());
        aux::raw::luaL_register(self.L, libname_, l_.as_ptr())
    }
    /// Raw wrapper for luaL_getmetafield.
    pub unsafe fn getmetafield(&mut self, obj: i32, e: &str) -> bool {
        #![inline]
        let cstr = CString::from_slice(e.as_bytes());
        aux::raw::luaL_getmetafield(self.L, obj as c_int, cstr.as_ptr()) != 0
    }
    /// Raw wrapper for luaL_callmeta.
    pub unsafe fn callmeta(&mut self, obj: i32, e: &str) -> bool {
        #![inline]
        let cstr = CString::from_slice(e.as_bytes());
        aux::raw::luaL_callmeta(self.L, obj as c_int, cstr.as_ptr()) != 0
    }
    /// Raw wrapper for luaL_typerror; raises an error and never returns.
    pub unsafe fn typerror(&mut self, narg: i32, tname: &str) -> ! {
        #![inline]
        let cstr = CString::from_slice(tname.as_bytes());
        aux::raw::luaL_typerror(self.L, narg as c_int, cstr.as_ptr());
        unreachable!()
    }
    /// Raw wrapper for luaL_argerror; raises an error and never returns.
    pub unsafe fn argerror(&mut self, narg: i32, extramsg: &str) -> ! {
        #![inline]
        let cstr = CString::from_slice(extramsg.as_bytes());
        aux::raw::luaL_argerror(self.L, narg as c_int, cstr.as_ptr());
        unreachable!()
    }
    /// Note: the string is returned as 'static to prevent borrowing the
    /// RawState, but its lifetime is actually that of the value on the stack.
    // TODO: change return type to use core::str::Utf8Error
    pub unsafe fn checkstring(&mut self, narg: i32) -> Option<&'static str> {
        #![inline]
        str::from_utf8(self.checkbytes(narg)).ok()
    }
    /// Note: the byte vector is returned as 'static to prevent borrowing the
    /// RawState, but its lifetime is actually that of the value on the stack.
    pub unsafe fn checkbytes(&mut self, narg: i32) -> &'static [u8] {
        #![inline]
        let mut sz: libc::size_t = 0;
        let s = aux::raw::luaL_checklstring(self.L, narg, &mut sz);
        let buf = s as *const u8;
        slice::from_raw_parts(buf, sz as usize)
    }
    /// Note: the string is returned as 'static to prevent borrowing the
    /// RawState, but its lifetime is actually that of the value on the stack.
    // TODO: change return type to use core::str::Utf8Error
    pub unsafe fn optstring(&mut self, narg: i32, d: &'static str) -> Option<&'static str> {
        #![inline]
        str::from_utf8(self.optbytes(narg, d.as_bytes())).ok()
    }
    /// Note: the byte vector is returned as 'static to prevent borrowing the
    /// RawState, but its lifetime is actually that of the value on the stack.
    pub unsafe fn optbytes(&mut self, narg: i32, d: &'static [u8]) -> &'static [u8] {
        #![inline]
        let mut sz: libc::size_t = 0;
        let cstr = CString::from_slice(d);
        let s = aux::raw::luaL_optlstring(self.L, narg, cstr.as_ptr(), &mut sz);
        if s == cstr.as_ptr() {
            // luaL_optlstring returned our default pointer verbatim (the
            // argument was absent or nil). Hand back the caller's 'static
            // slice instead of a slice into the CString we are about to
            // drop, which would dangle.
            d
        } else {
            let buf = s as *const u8;
            slice::from_raw_parts(buf, sz as usize)
        }
    }
    /// Raw wrapper for luaL_checknumber.
    pub unsafe fn checknumber(&mut self, narg: i32) -> f64 {
        #![inline]
        aux::raw::luaL_checknumber(self.L, narg as c_int) as f64
    }
    /// Raw wrapper for luaL_optnumber.
    pub unsafe fn optnumber(&mut self, narg: i32, d: f64) -> f64 {
        #![inline]
        aux::raw::luaL_optnumber(self.L, narg as c_int, d as raw::lua_Number) as f64
    }
    /// Raw wrapper for luaL_checkinteger.
    pub unsafe fn checkinteger(&mut self, narg: i32) -> isize {
        #![inline]
        aux::raw::luaL_checkinteger(self.L, narg as c_int) as isize
    }
    /// Raw wrapper for luaL_optinteger.
    pub unsafe fn optinteger(&mut self, narg: i32, d: isize) -> isize {
        #![inline]
        aux::raw::luaL_optinteger(self.L, narg as c_int, d as raw::lua_Integer) as isize
    }
    /// Raw wrapper for luaL_checktype.
    pub unsafe fn checktype(&mut self, narg: i32, t: Type) {
        #![inline]
        aux::raw::luaL_checktype(self.L, narg as c_int, t as c_int)
    }
    /// Raw wrapper for luaL_checkany.
    pub unsafe fn checkany(&mut self, narg: i32) {
        #![inline]
        aux::raw::luaL_checkany(self.L, narg as c_int)
    }
    /// Raw wrapper for luaL_newmetatable.
    pub unsafe fn newmetatable(&mut self, tname: &str) -> bool {
        #![inline]
        let cstr = CString::from_slice(tname.as_bytes());
        aux::raw::luaL_newmetatable(self.L, cstr.as_ptr()) != 0
    }
    /// Raw wrapper for luaL_checkudata.
    pub unsafe fn checkudata(&mut self, narg: i32, tname: &str) -> *mut libc::c_void {
        #![inline]
        let cstr = CString::from_slice(tname.as_bytes());
        aux::raw::luaL_checkudata(self.L, narg as c_int, cstr.as_ptr())
    }
    /// Raw wrapper for luaL_where.
    pub unsafe fn where_(&mut self, lvl: i32) {
        #![inline]
        aux::raw::luaL_where(self.L, lvl as c_int)
    }
    /// Pushes "<location>: <s>" and raises a Lua error; never returns.
    pub unsafe fn errorstr(&mut self, s: &str) -> ! {
        #![inline]
        self.where_(1);
        self.pushstring(s);
        self.concat(2);
        raw::lua_error(self.L);
        unreachable!()
    }
    /// Raw wrapper for luaL_checkoption; returns the payload paired with
    /// the matched key in `lst`.
    pub unsafe fn checkoption<'a, T>(&mut self, narg: i32, def: Option<&str>, lst: &'a [(&str,T)])
                                     -> &'a T {
        let def_cstr = def.map(|d| CString::from_slice(d.as_bytes()));
        // as_ref(): keep the Option<CString> alive while we hold `defp`
        let defp = def_cstr.as_ref().map_or(ptr::null(), |c| c.as_ptr());
        let mut lst_cstrs = Vec::with_capacity(lst.len());
        let mut lstv = Vec::with_capacity(lst.len()+1);
        for &(k,_) in lst.iter() {
            let cstr = CString::from_slice(k.as_bytes());
            lstv.push(cstr.as_ptr());
            // keep the CStrings alive while lstv points into them
            lst_cstrs.push(cstr);
        }
        lstv.push(ptr::null());
        let i = aux::raw::luaL_checkoption(self.L, narg as c_int, defp, lstv.as_ptr()) as usize;
        &lst[i].1
    }
    /// Raw wrapper for luaL_ref.
    pub unsafe fn ref_(&mut self, t: i32) -> i32 {
        #![inline]
        aux::raw::luaL_ref(self.L, t as c_int) as i32
    }
    /// Raw wrapper for luaL_unref.
    pub unsafe fn unref(&mut self, t: i32, r: i32) {
        #![inline]
        aux::raw::luaL_unref(self.L, t as c_int, r as c_int)
    }
    /// Raw wrapper for luaL_loadfile; a NULL filename means stdin.
    pub unsafe fn loadfile(&mut self, filename: Option<&path::Path>) -> Result<(),LoadFileError> {
        #![inline]
        let cstr = filename.map(|p| CString::from_slice(p.as_vec()));
        let ptr = cstr.as_ref().map_or(ptr::null(), |cstr| cstr.as_ptr());
        match aux::raw::luaL_loadfile(self.L, ptr) {
            0 => Ok(()),
            raw::LUA_ERRSYNTAX => Err(LoadFileError::ErrSyntax),
            raw::LUA_ERRMEM => Err(LoadFileError::ErrMem),
            aux::raw::LUA_ERRFILE => Err(LoadFileError::ErrFile),
            _ => self.errorstr("loadfile: unexpected error from luaL_loadfile")
        }
    }
    /// Raw wrapper for luaL_loadbuffer.
    pub unsafe fn loadbuffer(&mut self, buf: &str, name: &str) -> Result<(),LoadError> {
        #![inline]
        let bp = buf.as_ptr() as *const libc::c_char;
        let bsz = buf.len() as libc::size_t;
        let cstr = CString::from_slice(name.as_bytes());
        match aux::raw::luaL_loadbuffer(self.L, bp, bsz, cstr.as_ptr()) {
            0 => Ok(()),
            raw::LUA_ERRSYNTAX => Err(LoadError::ErrSyntax),
            raw::LUA_ERRMEM => Err(LoadError::ErrMem),
            _ => self.errorstr("loadbuffer: unexpected error from luaL_loadbuffer")
        }
    }
    /// Raw wrapper for luaL_loadstring.
    pub unsafe fn loadstring(&mut self, s: &str) -> Result<(),LoadError> {
        #![inline]
        let cstr = CString::from_slice(s.as_bytes());
        match aux::raw::luaL_loadstring(self.L, cstr.as_ptr()) {
            0 => Ok(()),
            raw::LUA_ERRSYNTAX => Err(LoadError::ErrSyntax),
            raw::LUA_ERRMEM => Err(LoadError::ErrMem),
            _ => self.errorstr("loadstring: unexpected error from luaL_loadstring")
        }
    }
    /// Note: the string is returned as 'static to prevent borrowing the
    /// RawState, but its lifetime is actually that of the value on the stack.
    pub unsafe fn gsub(&mut self, s: &str, p: &str, r: &str) -> &'static str {
        #![inline]
        let (s_, p_, r_) = (CString::from_slice(s.as_bytes()),
                            CString::from_slice(p.as_bytes()),
                            CString::from_slice(r.as_bytes()));
        let (sp, pp, rp) = (s_.as_ptr(), p_.as_ptr(), r_.as_ptr());
        // luaL_gsub pushes the result string, so `res` points at Lua-owned
        // memory that outlives the CStrings above
        let res = aux::raw::luaL_gsub(self.L, sp, pp, rp);
        let cstr = ffi::c_str_to_bytes(&res);
        let res = str::from_utf8(cstr).unwrap();
        mem::transmute::<&str,&'static str>(res)
    }
    /// Raw wrapper for luaL_argcheck.
    pub unsafe fn argcheck(&mut self, cond: bool, narg: i32, extramsg: &str) {
        #![inline]
        let cstr = CString::from_slice(extramsg.as_bytes());
        aux::raw::luaL_argcheck(self.L, cond, narg as c_int, cstr.as_ptr())
    }
    /// Raw wrapper for luaL_dofile; returns `true` on success.
    pub unsafe fn dofile(&mut self, filename: Option<&path::Path>) -> bool {
        #![inline]
        let cstr = filename.map(|p| CString::from_slice(p.as_vec()));
        // Borrow the CString rather than consuming it: `cstr.map_or(..)`
        // would move and drop the CString before the call below, leaving
        // `name` a dangling pointer.
        let name = cstr.as_ref().map_or(ptr::null(), |c| c.as_ptr());
        aux::raw::luaL_dofile(self.L, name) == 0
    }
    /// Raw wrapper for luaL_dostring; returns `true` on success.
    pub unsafe fn dostring(&mut self, s: &str) -> bool {
        #![inline]
        // the temporary CString lives until the end of this statement, so
        // the pointer remains valid across the call
        aux::raw::luaL_dostring(self.L, CString::from_slice(s.as_bytes()).as_ptr()) == 0
    }
    /// Fetches registry[tname] onto the stack.
    pub unsafe fn getmetatable_reg(&mut self, tname: &str) {
        #![inline]
        self.getfield(REGISTRYINDEX, tname)
    }
}
/// String buffer for building Lua strings piecemeal.
///
/// The Buffer assumes it needs longjmp safety, like ExternState.
pub struct Buffer<'a> {
// The raw luaL_Buffer; its `p` cursor points into its own `buffer` array.
B: aux::raw::luaL_Buffer,
/// A &mut pointer to the ExternState that created this Buffer.
/// The buffer internally holds on to the *lua_Buffer that the State wraps,
/// so to ensure safety it also borrows the &mut ExternState. Use this
/// field to get mutable access to the State while the buffer is alive.
pub L: &'a mut ExternState<'a>
}
/// Size in bytes of the internal buffer used by Buffer and returned by
/// prepbuffer() (mirrors luaL's LUAL_BUFFERSIZE).
pub const BUFFERSIZE: usize = aux::raw::LUAL_BUFFERSIZE as usize;
impl<'a> Buffer<'a> {
/// Adds the byte `c` to the buffer.
pub unsafe fn addbyte(&mut self, c: u8) {
#![inline]
// don't call through to luaL_addchar, because we want to insert a call to checkstack()
// iff we have to prep the buffer.
let startp: *mut libc::c_char = &mut self.B.buffer[0];
// cursor past the end of the inline buffer: flush/re-prep first
if self.B.p >= startp.offset(aux::raw::LUAL_BUFFERSIZE as isize) {
self.L.checkstack_(1);
aux::raw::luaL_prepbuffer(&mut self.B);
}
*self.B.p = c as libc::c_char;
self.B.p = self.B.p.offset(1);
}
/// Adds the char `c` as utf-8 bytes to the buffer.
pub unsafe fn addchar(&mut self, c: char) {
#![inline]
// encode_utf8 yields the number of bytes written for this char
let mut buf = [0u8; 4];
let count = c.encode_utf8(&mut buf).unwrap();
self.addbytes(&buf[..count]);
}
/// Adds to the buffer a string of length `n` previously copied to the
/// buffer area (see prepbuffer()).
pub unsafe fn addsize(&mut self, n: usize) {
#![inline]
aux::raw::luaL_addsize(&mut self.B, n as libc::size_t)
}
/// Returns a pointer to an array of size BUFFERSIZE where you can copy a
/// string to be added to the buffer. After copying the string into this
/// space you must call addsize() with the size of the string to actually
/// add it to the buffer.
pub unsafe fn prepbuffer(&mut self) -> &mut [u8; aux::raw::LUAL_BUFFERSIZE as usize] {
#![inline]
self.L.checkstack_(1);
// luaL_prepbuffer ends up returning the buffer field.
// Rather than unsafely trying to transmute that to the array, just return the field
// ourselves.
aux::raw::luaL_prepbuffer(&mut self.B);
// i8 -> u8 reinterpretation of the same fixed-size array
mem::transmute::<&mut [i8; aux::raw::LUAL_BUFFERSIZE as usize],
&mut [u8; aux::raw::LUAL_BUFFERSIZE as usize]>(&mut self.B.buffer)
}
/// Adds the string to the buffer.
pub unsafe fn addstring(&mut self, s: &str) {
#![inline]
self.addbytes(s.as_bytes())
}
/// Adds the byte vector to the buffer.
pub unsafe fn addbytes(&mut self, bytes: &[u8]) {
#![inline]
// luaL_addlstring() just iterates over the string calling addchar().
// We want our checkstack calls, so let's just do that here instead directly.
for &b in bytes.iter() {
self.addbyte(b);
}
}
/// Adds the value at the top of the stack to the buffer. Pops the value.
///
/// This is the only method on string buffers that can (and must) be called
/// with an extra element on the stack, which is the value to be added to
/// the buffer.
pub unsafe fn addvalue(&mut self) {
#![inline]
luaassert!(self.L, self.L.gettop() >= 1, "addvalue: stack underflow");
self.L.checkstack_(1); // luaL_addvalue() needs this if the value is too large
aux::raw::luaL_addvalue(&mut self.B)
}
/// Finishes the use of the buffer, leaving the final string on top of the
/// stack.
pub unsafe fn pushresult(mut self) {
#![inline]
self.L.checkstack_(1); // possibly needed for the emptybuffer
aux::raw::luaL_pushresult(&mut self.B)
}
}
/* Debug API */
/// Event codes
// Discriminant values mirror the raw LUA_HOOK* constants so variants can be
// converted to/from the C event codes (see DebugEvent::from_event()).
#[derive(Copy)]
pub enum DebugEvent {
/// The call hook is called when the interpreter calls a function. The hook is called
/// just after Lua enters the new function, before the function gets its arguments.
HookCall = raw::LUA_HOOKCALL as isize,
/// The return hook is called when the interpreter returns from a function. The hook is
/// called just before Lua leaves the function. You have no access to the values to be
/// returned by the function.
HookRet = raw::LUA_HOOKRET as isize,
/// The line hook is called when the interpreter is about to start the execution of a new
/// line of code, or when it jumps back in the code (even to the same line).
/// (This event only happens while Lua is executing a Lua function.)
HookLine = raw::LUA_HOOKLINE as isize,
/// The count hook is called after the interpreter executes every `count` instructions.
/// (This event only happens while Lua is executing a Lua function.)
HookCount = raw::LUA_HOOKCOUNT as isize,
/// The tailret event is used when a HookRet hook is called while simulating a return from
/// a function that did a tail call; in this case, it is useless to call getinfo().
HookTailRet = raw::LUA_HOOKTAILRET as isize
}
impl DebugEvent {
    /// Maps a raw c_int event code onto its DebugEvent, yielding None for
    /// unrecognized codes.
    pub fn from_event(event: c_int) -> Option<DebugEvent> {
        if event == raw::LUA_HOOKCALL {
            Some(DebugEvent::HookCall)
        } else if event == raw::LUA_HOOKRET {
            Some(DebugEvent::HookRet)
        } else if event == raw::LUA_HOOKLINE {
            Some(DebugEvent::HookLine)
        } else if event == raw::LUA_HOOKCOUNT {
            Some(DebugEvent::HookCount)
        } else if event == raw::LUA_HOOKTAILRET {
            Some(DebugEvent::HookTailRet)
        } else {
            None
        }
    }
}
/// Event mask for HookCall
pub const MASKCALL: i32 = raw::LUA_MASKCALL as i32;
/// Event mask for HookRet
pub const MASKRET: i32 = raw::LUA_MASKRET as i32;
/// Event mask for HookLine
pub const MASKLINE: i32 = raw::LUA_MASKLINE as i32;
/// Event mask for HookCount
pub const MASKCOUNT: i32 = raw::LUA_MASKCOUNT as i32;
/// Type for functions to be called by the debugger in specific events
pub type Hook = raw::lua_Hook;
/// A structure used to carry different pieces of information about an active function.
/// getstack() fills only the private part of this structure, for later use. To fill the other
/// fields of lua_Debug with useful information, call getinfo().
pub type Debug = raw::lua_Debug;
impl Debug {
    /// Constructs a Debug value with every field zeroed.
    #[inline]
    pub fn new() -> Debug {
        let ar: Debug = std::default::Default::default();
        ar
    }
}
impl State {
/// Gets information about the interpreter runtime stack.
///
/// This function returns a Debug structure with an identification of the
/// activation record of the function executing at a given level. Level 0
/// is the current running function, whereas level n+1 is the function that
/// has called level n. When there are no errors, getstack() returns
/// Some(Debug); when called with a level greater than the stack depth, it
/// returns None.
pub fn getstack(&mut self, level: i32) -> Option<Debug> {
#![inline(always)]
self.as_extern().getstack(level)
}
/// Returns information about a specific function or function invocation.
///
/// To get information about a function invocation, the parameter `ar` must
/// be a valid activation record that was returned by a previous call to
/// getstack() or given as argument to a hook.
///
/// To get information about a function you push it onto the stack and
/// start the `what` string with the character '>'. (In that case,
/// getinfo() pops the function in the top of the stack.) For instance, to
/// know in which line a function `f` was defined, you can write the
/// following code:
///
///   let mut ar = Debug::new();
///   L.getfield(GLOBALSINDEX, "f"); // get global 'f'
///   L.getinfo(">S", &mut ar);
///   println!("{}", ar.linedefined);
///
/// Each character in the string `what` selects some fields of the
/// structure `ar` to be filled or a value to be pushed on the stack:
///
/// * 'n': fills in the fields `name` and `namewhat`
/// * 'S': fills in the fields `source`, `short_src`, `linedefined`,
///        `lastlinedefined`, and `what`
/// * 'l': fills in the field `currentline`
/// * 'u': fills in the field `nups`
/// * 'f': pushes onto the stack the function that is running at the given
///        level
/// * 'L': pushes onto the stack a table whose indices are the numbers of
///        the lines that are valid on the function. (A valid line is a
///        line with some associated code, that is, a line where you can
///        put a break point. Non-valid lines include empty lines and
///        comments.)
///
/// This function returns `false` on error (for instance, an invalid option
/// in `what`).
///
/// Fails the task if `what` has interior NULs.
pub fn getinfo(&mut self, what: &str, ar: &mut Debug) -> bool {
#![inline(always)]
unsafe { self.as_extern().getinfo(what, ar) }
}
/// Gets information about a local variable of a given activation record.
/// The parameter `ar` must be a valid activation record that was filled by
/// a previous call to getstack() or given as an argument to a hook. The
/// index `n` selects which local variable to inspect (1 is the first
/// parameter or active local variable, and so on, until the last active
/// local variable). getlocal() pushes the variable's value onto the stack
/// and returns its name.
///
/// Variable names starting with '(' represent internal variables (loop
/// control variables, temporaries, and C function locals).
///
/// The name is returned as a &[u8] to avoid confusion with failed utf-8
/// decoding vs invalid indices.
pub fn getlocal<'a>(&mut self, ar: &'a Debug, n: i32) -> Option<&'a [u8]> {
#![inline(always)]
unsafe { self.as_extern().getlocal(ar, n) }
}
/// Sets the value of a local variable of a given activation record.
/// Parameters `ar` and `n` are as in getlocal(). setlocal() assigns the
/// value at the top of the stack to the variable and returns its name. It
/// also pops the value from the stack.
///
/// Returns None (and pops nothing) when the index is greater than the
/// number of active local variables.
///
/// The name is returned as a &[u8] to avoid confusion with failed utf-8
/// decoding vs invalid indices.
pub fn setlocal<'a>(&mut self, ar: &'a mut Debug, n: i32) -> Option<&'a [u8]> {
#![inline(always)]
unsafe { self.as_extern().setlocal(ar, n) }
}
/// Gets information about a closure's upvalue. (For Lua functions,
/// upvalues are the external local variables that the function uses, and
/// that are consequently included in its closure.) getupvalue() gets the
/// index `n` of an upvalue, pushes the upvalue's value onto the stack, and
/// returns its name. `funcindex` points to the closure in the stack.
/// (Upvalues have no particular order, as they are active through the
/// whole function. So, they are numbered in an arbitrary order.)
///
/// Returns None (and pushes nothing) when the index is greater than the
/// number of upvalues. For C functions, this function uses the empty
/// string "" as a name for all upvalues.
///
/// The name is returned as a &[u8] to avoid confusion with failed utf-8
/// decoding vs invalid indices.
pub fn getupvalue<'a>(&'a mut self, funcidx: i32, n: i32) -> Option<&'a [u8]> {
#![inline(always)]
unsafe { self.as_extern().getupvalue(funcidx, n) }
}
/// Sets the value of a closure's upvalue. It assigns the value at the top
/// of the stack to the upvalue and returns its name. It also pops the
/// value from the stack. Parameters `funcindex` and `n` are as in
/// getupvalue().
///
/// Returns None (and pops nothing) when the index is greater than the
/// number of upvalues.
///
/// The name is returned as a &[u8] to avoid confusion with failed utf-8
/// decoding vs invalid indices.
pub fn setupvalue<'a>(&'a mut self, funcidx: i32, n: i32) -> Option<&'a [u8]> {
#![inline(always)]
unsafe { self.as_extern().setupvalue(funcidx, n) }
}
/// Sets the debugging hook function.
///
/// Argument `f` is the hook function. `mask` specifies on which events the
/// hook will be called: it is formed by a bitwise OR of the MASK*
/// constants (MASKCALL, MASKRET, MASKLINE, MASKCOUNT). The `count`
/// argument is only meaningful when the mask includes MASKCOUNT.
///
/// A hook is disabled by setting `mask` to zero.
pub fn sethook(&mut self, f: Hook, mask: i32, count: i32) {
#![inline(always)]
self.as_extern().sethook(f, mask, count)
}
/// Returns the current hook function
pub fn gethook(&mut self) -> Hook {
#![inline(always)]
self.as_extern().gethook()
}
/// Returns the current hook mask
pub fn gethookmask(&mut self) -> i32 {
#![inline(always)]
self.as_extern().gethookmask()
}
/// Returns the current hook count
pub fn gethookcount(&mut self) -> i32 {
#![inline(always)]
self.as_extern().gethookcount()
}
}
#[allow(missing_docs)]
// Debug-API wrappers: validate stack preconditions before delegating to
// RawState.
impl<'l> ExternState<'l> {
pub fn getstack(&mut self, level: i32) -> Option<Debug> {
self.as_raw().getstack(level)
}
pub unsafe fn getinfo(&mut self, what: &str, ar: &mut Debug) -> bool {
// '>' makes lua_getinfo consume the function on top of the stack
if what.starts_with(">") {
luaassert!(self, self.gettop() >= 1 && self.isfunction(-1),
"getinfo: top stack value is not a function");
}
// 'f' and 'L' each push a value, so reserve a slot
if what.find(['f', 'L'].as_slice()).is_some() {
self.checkstack_(1);
}
self.as_raw().getinfo(what, ar)
}
pub unsafe fn getlocal<'a>(&mut self, ar: &'a Debug, n: i32) -> Option<&'a [u8]> {
// getlocal pushes the variable's value
self.checkstack_(1);
self.as_raw().getlocal(ar, n)
}
pub unsafe fn setlocal<'a>(&mut self, ar: &'a mut Debug, n: i32) -> Option<&'a [u8]> {
// setlocal pops the value to assign, so one must be present
luaassert!(self, self.gettop() >= 1, "setlocal: stack underflow");
self.as_raw().setlocal(ar, n)
}
pub unsafe fn getupvalue<'a>(&'a mut self, funcidx: i32, n: i32) -> Option<&'a [u8]> {
self.check_acceptable(funcidx);
self.checkstack_(1);
self.as_raw().getupvalue(funcidx, n)
}
pub unsafe fn setupvalue<'a>(&'a mut self, funcidx: i32, n: i32) -> Option<&'a [u8]> {
self.check_acceptable(funcidx);
self.checkstack_(1);
self.as_raw().setupvalue(funcidx, n)
}
pub fn sethook(&mut self, f: Hook, mask: i32, count: i32) {
self.as_raw().sethook(f, mask, count)
}
pub fn gethook(&mut self) -> Hook {
self.as_raw().gethook()
}
pub fn gethookmask(&mut self) -> i32 {
self.as_raw().gethookmask()
}
pub fn gethookcount(&mut self) -> i32 {
self.as_raw().gethookcount()
}
}
#[allow(missing_docs)]
// Raw lua_* debug bindings: straight type conversions, no validation.
impl<'l> RawState<'l> {
pub fn getstack(&mut self, level: i32) -> Option<Debug> {
#![inline]
let mut ar: Debug = std::default::Default::default();
// lua_getstack returns non-zero when `level` is within the stack depth
if unsafe { raw::lua_getstack(self.L, level as c_int, &mut ar) != 0 } {
Some(ar)
} else {
None
}
}
pub unsafe fn getinfo(&mut self, what: &str, ar: &mut Debug) -> bool {
#![inline]
raw::lua_getinfo(self.L, CString::from_slice(what.as_bytes()).as_ptr(), ar) != 0
}
// The following all funnel the C API's "NULL means invalid index" result
// through c_str_to_bytes(), which maps NULL to None.
pub unsafe fn getlocal<'a>(&mut self, ar: &'a Debug, n: i32) -> Option<&'a [u8]> {
#![inline]
let res = raw::lua_getlocal(self.L, ar, n as c_int);
c_str_to_bytes(res)
}
pub unsafe fn setlocal<'a>(&mut self, ar: &'a mut Debug, n: i32) -> Option<&'a [u8]> {
#![inline]
let res = raw::lua_setlocal(self.L, ar, n as c_int);
c_str_to_bytes(res)
}
pub unsafe fn getupvalue<'a>(&'a mut self, funcidx: i32, n: i32) -> Option<&'a [u8]> {
#![inline]
let res = raw::lua_getupvalue(self.L, funcidx as c_int, n as c_int);
c_str_to_bytes(res)
}
pub unsafe fn setupvalue<'a>(&'a mut self, funcidx: i32, n: i32) -> Option<&'a [u8]> {
#![inline]
let res = raw::lua_setupvalue(self.L, funcidx as c_int, n as c_int);
c_str_to_bytes(res)
}
pub fn sethook(&mut self, f: Hook, mask: i32, count: i32) {
#![inline]
unsafe { raw::lua_sethook(self.L, f, mask as c_int, count as c_int); }
}
pub fn gethook(&mut self) -> Hook {
#![inline]
unsafe { raw::lua_gethook(self.L) }
}
pub fn gethookmask(&mut self) -> i32 {
#![inline]
unsafe { raw::lua_gethookmask(self.L) as i32 }
}
pub fn gethookcount(&mut self) -> i32 {
#![inline]
unsafe { raw::lua_gethookcount(self.L) as i32 }
}
}
/// Turns a raw C string pointer into Some(byte slice), or None for NULL.
/// The lifetime 'a is chosen by the caller and must not outlive the
/// underlying C string.
#[inline]
unsafe fn c_str_to_bytes<'a>(cstr: *const libc::c_char) -> Option<&'a [u8]> {
    if cstr.is_null() {
        return None;
    }
    let bytes = ffi::c_str_to_bytes(&cstr);
    Some(mem::transmute::<&[u8],&'a [u8]>(bytes))
}
Fix deprecation warnings
Migrate to the new CStr API (std::ffi::CStr/CString) to silence them.
//! Lua 5.1 bindings for Rust
#![crate_name = "lua"]
#![crate_type = "rlib"]
#![warn(missing_docs)]
#![allow(non_snake_case)]
#![feature(libc,core,std_misc,old_path,unicode,unsafe_no_drop_flag)]
extern crate libc;
use libc::c_int;
use std::{fmt, mem, ptr, str, slice};
use std::old_path as path;
use std::ffi::{CStr, CString};
use std::marker;
use std::num::SignedInt;
/// Human-readable major version string (from the generated `config` module)
pub const VERSION: &'static str = config::LUA_VERSION;
/// Human-readable release version string
pub const RELEASE: &'static str = config::LUA_RELEASE;
/// Machine-readable version number
pub const VERSION_NUM: isize = config::LUA_VERSION_NUM as isize;
/// Value for lua_call that means return all results
pub const MULTRET: i32 = raw::MULTRET as i32;
/// Minimum Lua stack available to a C function
pub const MINSTACK: i32 = config::LUA_MINSTACK as i32;
/// Pseudo-index for the registry
pub const REGISTRYINDEX: i32 = raw::LUA_REGISTRYINDEX as i32;
/// Pseudo-index for the thread environment
pub const GLOBALSINDEX: i32 = raw::LUA_GLOBALSINDEX as i32;
/// Pseudo-index for the running C function environment
pub const ENVIRONINDEX: i32 = raw::LUA_ENVIRONINDEX as i32;
/// Computes the pseudo-index corresponding to upvalue number `n`.
/// Indexes in the range [1,256] yield acceptable pseudo-indices; anything
/// outside that range is likely to produce an unacceptable one.
pub fn upvalueindex(n: i32) -> i32 {
    #![inline]
    let idx = raw::lua_upvalueindex(n as c_int);
    idx as i32
}
include!(concat!(env!("OUT_DIR"), "/config.rs"));
#[allow(missing_docs)]
pub mod raw;
#[allow(missing_docs)]
#[path="aux_.rs"]
pub mod aux;
#[path = "lualib.rs"]
#[allow(missing_docs)]
pub mod lib;
#[path="macro.rs"]
mod macros;
#[cfg(test)]
mod tests;
// Internal assertion helper: when `cond` is false, raises a Lua error on
// `$state` via errorstr(). The variadic form builds the message with
// format!() before raising.
macro_rules! luaassert{
    ($state:expr, $cond:expr, $msg:expr) => {
        if !$cond {
            $state.errorstr($msg.as_slice());
        }
    };
    ($state:expr, $cond:expr, $($arg:expr),+) => {
        if !$cond {
            let msg = format!($($arg),+);
            $state.errorstr(msg.as_slice());
        }
    }
}
/// Lua value types
// Discriminants mirror the LUA_T* constants from the C API, so values can be
// cast directly to/from the codes returned by lua_type().
#[derive(Clone,Copy,PartialEq,Eq,Debug)]
pub enum Type {
    /// Type for nil
    Nil = raw::LUA_TNIL as isize,
    /// Type for booleans
    Boolean = raw::LUA_TBOOLEAN as isize,
    /// Type for light userdata
    LightUserdata = raw::LUA_TLIGHTUSERDATA as isize,
    /// Type for numbers
    Number = raw::LUA_TNUMBER as isize,
    /// Type for strings
    String = raw::LUA_TSTRING as isize,
    /// Type for tables
    Table = raw::LUA_TTABLE as isize,
    /// Type for functions
    Function = raw::LUA_TFUNCTION as isize,
    /// Type for userdata
    Userdata = raw::LUA_TUSERDATA as isize,
    /// Type for threads
    Thread = raw::LUA_TTHREAD as isize
}
impl Type {
    /// Returns the name of the type
    pub fn name(&self) -> &'static str {
        // NB: lua_typename() doesn't actually use its state parameter
        let raw_name = unsafe { raw::lua_typename(ptr::null_mut(), *self as libc::c_int) };
        let bytes = unsafe { CStr::from_ptr(raw_name) }.to_bytes();
        let name = str::from_utf8(bytes).unwrap();
        // Extending the borrow to 'static relies on lua_typename() returning
        // pointers into static string data, as the original code assumed.
        unsafe { mem::transmute::<&str, &'static str>(name) }
    }
}
/// Garbage collection options (used with State.gc())
//#[allow(dead_code)] // FIXME(rust-lang/rust#17632): dead_code warning is wrong here
#[derive(Copy)]
pub enum GC {
    /// Stops the garbage collector
    Stop = raw::LUA_GCSTOP as isize,
    /// Restarts the garbage collector
    Restart = raw::LUA_GCRESTART as isize,
    /// Performs a full garbage-collection cycle
    Collect = raw::LUA_GCCOLLECT as isize,
    /// Returns the current amount of memory (in Kbytes) in use by Lua
    Count = raw::LUA_GCCOUNT as isize,
    /// Returns the remainder of dividing the current amount of bytes in memory in use by Lua
    /// by 1024
    CountB = raw::LUA_GCCOUNTB as isize,
    /// Performs an incremental step of garbage collection. The step "size" is controlled by
    /// `data` (larger values mean more steps) in a non-specified way. If you want to control
    /// the step size you must experimentally tune the value of `data`. The function returns
    /// 1 if the step finished a garbage-collection cycle.
    Step = raw::LUA_GCSTEP as isize,
    /// Sets `data` as the new value for the pause of the collector. The function returns the
    /// previous value of the pause.
    SetPause = raw::LUA_GCSETPAUSE as isize,
    /// Sets `data` as the new value for the step multiplier of the collector. The function
    /// returns the previous value of the step multiplier.
    SetStepMul = raw::LUA_GCSETSTEPMUL as isize
}
// Re-exports of the function-pointer types from the generated `raw` bindings.
/// Type that represents C functions that can be registered with Lua.
pub type CFunction = raw::lua_CFunction;
/// Function type for reading blocks when loading Lua chunks.
pub type Reader = raw::lua_Reader;
/// Function type for writing blocks when dumping Lua chunks.
pub type Writer = raw::lua_Writer;
/// Type that represents memory-allocation functions
pub type Alloc = raw::lua_Alloc;
/// State.load() errors
// Discriminants are the LUA_ERR* codes returned by lua_load().
#[derive(Copy)]
pub enum LoadError {
    /// Syntax error during pre-compilation
    ErrSyntax = raw::LUA_ERRSYNTAX as isize,
    /// Memory allocation error
    ErrMem = raw::LUA_ERRMEM as isize
}
impl fmt::Debug for LoadError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
LoadError::ErrSyntax => f.pad("syntax error"),
LoadError::ErrMem => f.pad("memory allocation error")
}
}
}
/// State.loadfile() errors
// Like LoadError, plus the auxlib's file error code (LUA_ERRFILE).
#[derive(Copy)]
pub enum LoadFileError {
    /// Syntax error during pre-compilation
    ErrSyntax = raw::LUA_ERRSYNTAX as isize,
    /// Memory allocation error
    ErrMem = raw::LUA_ERRMEM as isize,
    /// Cannot read/open the file
    ErrFile = aux::raw::LUA_ERRFILE as isize
}
impl fmt::Debug for LoadFileError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
LoadFileError::ErrSyntax => f.pad("syntax error"),
LoadFileError::ErrMem => f.pad("memory allocation error"),
LoadFileError::ErrFile => f.pad("file read/open error")
}
}
}
/// State.pcall() errors
// Discriminants are the LUA_ERR* codes returned by lua_pcall().
#[derive(Copy)]
pub enum PCallError {
    /// Runtime error
    ErrRun = raw::LUA_ERRRUN as isize,
    /// Memory allocation error
    ErrMem = raw::LUA_ERRMEM as isize,
    /// Error while running the error handler function
    ErrErr = raw::LUA_ERRERR as isize
}
impl PCallError {
    /// Converts an error code from `lua_pcall()` into a PCallError.
    /// Any code other than the three LUA_ERR* values yields `None`.
    pub fn from_code(code: c_int) -> Option<PCallError> {
        if code == raw::LUA_ERRRUN {
            Some(PCallError::ErrRun)
        } else if code == raw::LUA_ERRMEM {
            Some(PCallError::ErrMem)
        } else if code == raw::LUA_ERRERR {
            Some(PCallError::ErrErr)
        } else {
            None
        }
    }
}
impl fmt::Debug for PCallError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
PCallError::ErrRun => f.pad("runtime error"),
PCallError::ErrMem => f.pad("memory allocation error"),
PCallError::ErrErr => f.pad("error handler func error")
}
}
}
/// The Lua state.
/// Every Lua thread is represented by a separate State.
///
/// When executing functions on the State that take acceptable indexes, these
/// indexes are checked to ensure they are within the stack space defined by
/// the last call to State.checkstack(). If they are not acceptable, the
/// function fails without calling lua_checkstack(). Negative indices are
/// checked against the current top of the stack instead of the stack space.
///
/// Unless otherwise noted, all safe functions that take indexes will fail if
/// the index is not acceptable.
///
/// There are two variant state types, ExternState and RawState, that assume
/// different behavior for thrown errors. ExternState is meant for functions
/// that are executing in a protected scope (see pcall()), and RawState is
/// meant for omitting safety in favor of performance.
///
/// Note that it is completely unsafe to pass a reference to State to a
/// function that is executing in a protected scope. Use ExternState for that.
// NB: layout must stay identical to ExternState/RawState; the as_extern()
// and as_raw() conversions transmute between them.
#[unsafe_no_drop_flag]
#[repr(C)]
pub struct State {
    L: *mut raw::lua_State,      // owned; nulled out by Drop after lua_close()
    _stackspace: i32,
    _marker: marker::PhantomData<raw::lua_State>
}
impl Drop for State {
    fn drop(&mut self) {
        // `L` is nulled after closing so a second drop (possible under
        // #[unsafe_no_drop_flag]) is a harmless no-op.
        if self.L.is_null() {
            return;
        }
        unsafe {
            raw::lua_close(self.L);
        }
        self.L = ptr::null_mut();
    }
}
/// ExternState is a Lua State that was created from a raw::lua_State value.
/// Every error-throwing function is assumed to be using longjmp instead of
/// task failure.
///
/// See State for more information.
// NB: layout must be identical to State
// If Drop is ever implemented, add unsafe_no_drop_flag
#[repr(C)]
pub struct ExternState<'a> {
    L: *mut raw::lua_State,      // borrowed; not closed by this wrapper (no Drop)
    stackspace: i32,
    _marker: marker::PhantomData<&'a mut raw::lua_State>
}
/// RawState is a Lua State that represents raw, unchecked access. All
/// functions eschew safety in favor of speed. Like ExternState, all
/// error-throwing functions are assumed to be using longjmp.
// NB: layout must be identical to State
// If Drop is ever implemented, add unsafe_no_drop_flag
#[repr(C)]
pub struct RawState<'a> {
    L: *mut raw::lua_State,      // borrowed; not closed by this wrapper (no Drop)
    stackspace: i32,
    _marker: marker::PhantomData<&'a mut raw::lua_State>
}
// State construction
impl State {
    /// Returns a new State, or fails if memory cannot be allocated for the state
    pub fn new() -> State {
        #![inline]
        State::new_opt().unwrap()
    }
    /// Returns a new State, or None if memory cannot be allocated for the state
    pub fn new_opt() -> Option<State> {
        return unsafe {
            let L = raw::lua_newstate(alloc, ptr::null_mut());
            if !L.is_null() {
                // Install the default panic handler before handing the state out.
                raw::lua_atpanic(L, panic);
                Some(State{ L: L, _stackspace: MINSTACK, _marker: marker::PhantomData })
            } else {
                None
            }
        };
        // Allocator callback: free when nsize == 0, (re)allocate otherwise.
        // realloc(NULL, n) acts like malloc(n), which covers fresh allocations.
        extern "C" fn alloc(_ud: *mut libc::c_void, ptr: *mut libc::c_void, _osize: libc::size_t,
                            nsize: libc::size_t) -> *mut libc::c_void {
            unsafe {
                if nsize == 0 {
                    libc::free(ptr as *mut libc::c_void);
                    ptr::null_mut()
                } else {
                    libc::realloc(ptr, nsize)
                }
            }
        }
        // Default panic handler: turns an unprotected Lua error into a Rust
        // panic, describing the error value at the top of the stack.
        extern "C" fn panic(L: *mut raw::lua_State) -> c_int {
            unsafe {
                let s = RawState::from_lua_State(L).describe_(-1, false);
                panic!("unprotected error in call to Lua API ({})", s);
            }
        }
    }
}
impl<'l> ExternState<'l> {
    /// Wraps a *raw::lua_State in a ExternState.
    /// NOTE(review): the caller must ensure `L` is a valid lua_State that
    /// outlives all use of the returned ('static) wrapper.
    pub unsafe fn from_lua_State(L: *mut raw::lua_State) -> ExternState<'static> {
        #![inline]
        ExternState{ L: L, stackspace: MINSTACK, _marker: marker::PhantomData }
    }
}
impl<'l> RawState<'l> {
    /// Wraps a *raw::lua_State in a RawState.
    /// NOTE(review): the caller must ensure `L` is a valid lua_State that
    /// outlives all use of the returned ('static) wrapper.
    pub unsafe fn from_lua_State(L: *mut raw::lua_State) -> RawState<'static> {
        #![inline]
        RawState{ L: L, stackspace: MINSTACK, _marker: marker::PhantomData }
    }
}
// State conversion
impl State {
    /// Returns the same state as an ExternState
    pub fn as_extern<'a>(&'a mut self) -> &'a mut ExternState<'a> {
        #![inline]
        // Sound only because State and ExternState are both #[repr(C)] with
        // matching field layout (see the struct definitions above).
        unsafe { mem::transmute(self) }
    }
    /// Returns the same state as a RawState
    pub fn as_raw<'a>(&'a mut self) -> &'a mut RawState<'a> {
        #![inline]
        // Same layout-compatibility argument as as_extern().
        unsafe { mem::transmute(self) }
    }
}
impl<'a> ExternState<'a> {
    /// Returns the same state as a RawState
    // NOTE(review): the returned borrow uses lifetime 'a rather than the
    // lifetime of `&mut self`; confirm it cannot outlive the receiver.
    pub fn as_raw(&mut self) -> &'a mut RawState<'a> {
        #![inline]
        unsafe { mem::transmute(self) }
    }
}
// Escape hatches: expose the raw pointer for direct use of the C API.
impl State {
    /// Provides unsafe access to the underlying *lua_State
    pub unsafe fn get_lua_State(&mut self) -> *mut raw::lua_State {
        #![inline]
        self.L
    }
}
impl<'l> ExternState<'l> {
    /// Provides unsafe access to the underlying *lua_State
    pub unsafe fn get_lua_State(&mut self) -> *mut raw::lua_State {
        #![inline]
        self.L
    }
}
impl<'l> RawState<'l> {
    /// Provides unsafe access to the underlying *lua_State
    pub unsafe fn get_lua_State(&mut self) -> *mut raw::lua_State {
        #![inline]
        self.L
    }
}
impl State {
    // The methods below are thin checked wrappers delegating to the
    // ExternState/RawState implementations of the same names.
    /// Creates a new thread, pushes it on the stack, and returns a `State`
    /// that represents this new thread. The new state returned by this
    /// function shares with the original state all global objects (such as
    /// tables), but has an independent execution stack.
    ///
    /// This new state does not get explicitly closed. Threads are subject to
    /// garbage collection, like any Lua object.
    pub fn newthread(&mut self) -> State {
        #![inline(always)]
        unsafe { self.as_raw().newthread() }
    }
    /// Sets a new panic function and returns the old one.
    ///
    /// The panic function can access the error message at the top of the stack.
    ///
    /// The default panic function installed by this library calls panic!() with
    /// the error message. Your panic function should either call through to
    /// the default one, or should panic!() itself. Otherwise, the application
    /// will be terminated.
    pub unsafe fn atpanic(&mut self, panicf: CFunction) -> CFunction {
        #![inline(always)]
        self.as_raw().atpanic(panicf)
    }
    /// Returns the textual description of the value at the given acceptable index.
    /// Returns "" if the given index is non-valid.
    pub fn describe(&mut self, idx: i32) -> String {
        #![inline(always)]
        unsafe { self.as_extern().describe(idx) }
    }
    /// Variant of describe() with control over stack usage. describe()
    /// may push new values onto the stack temporarily. Notably, it may do this
    /// to avoid converting the existing value's type. This method allows this
    /// behavior to be disabled. If usestack is true, this method may require 1
    /// free slot on the stack.
    pub fn describe_(&mut self, idx: i32, usestack: bool) -> String {
        #![inline(always)]
        unsafe { self.as_extern().describe_(idx, usestack) }
    }
    /// Returns the index of the top element of the stack.
    /// Indexes start at 1. 0 means the stack is empty.
    pub fn gettop(&mut self) -> i32 {
        #![inline(always)]
        self.as_extern().gettop()
    }
    /// Sets the stack top to the given acceptable index, or 0.
    /// If the new top is larger than the old one, new elements are filled with
    /// nil.
    /// If the index is 0, all stack elements are removed.
    pub fn settop(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().settop(idx) }
    }
    /// Pushes a copy of the element at the given valid index onto the stack.
    pub fn pushvalue(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().pushvalue(idx) }
    }
    /// Removes the element at the given valid index, shifting other elements
    /// as needed.
    /// Pseudo-indices are not valid for this call.
    pub fn remove(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().remove(idx) }
    }
    /// Moves the top element into the given valid index, shifting existing
    /// elements as needed.
    /// Pseudo-indices are not valid for this call.
    pub fn insert(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().insert(idx) }
    }
    /// Moves the top element into the given valid index and replaces the
    /// existing value, without shifting any other elements.
    pub fn replace(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().replace(idx) }
    }
    /// Ensures the stack contains at least `extra` free slots on the stack.
    /// Returns false if it cannot grow the stack as requested.
    pub fn checkstack(&mut self, extra: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().checkstack(extra) }
    }
    /// Ensures the stack contains at least `extra` free slots on the stack.
    /// Throws an error if it cannot grow the stack.
    pub fn checkstack_(&mut self, extra: i32) {
        #![inline(always)]
        unsafe { self.as_extern().checkstack_(extra) }
    }
    /// Exchanges values between different threads of the same global state.
    /// This method pops n values from the stack `self`, and pushes them to the
    /// stack `to`.
    ///
    /// Note: this method is unsafe because it cannot check to ensure that both
    /// threads belong to the same global state.
    ///
    /// Despite being unsafe, it still checks the validity of `n`.
    pub unsafe fn xmove(&mut self, to: &mut State, n: i32) {
        #![inline(always)]
        self.as_extern().xmove(to.as_extern(), n)
    }
    /// Returns `true` if the value at the given acceptable index is a number,
    /// or a string convertible to a number.
    pub fn isnumber(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().isnumber(idx) }
    }
    /// Returns `true` if the value at the given acceptable index is a string
    /// or a number (which is always convertible to a string).
    pub fn isstring(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().isstring(idx) }
    }
    /// Returns `true` if the value at the given acceptable index is a C
    /// function.
    pub fn iscfunction(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().iscfunction(idx) }
    }
    /// Returns `true` if the value at the given acceptable index is a userdata
    /// (either full or light).
    pub fn isuserdata(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().isuserdata(idx) }
    }
    /// Returns the type of the value at the given acceptable index. If the
    /// given index is non-valid, returns None.
    pub fn type_(&mut self, idx: i32) -> Option<Type> {
        #![inline(always)]
        unsafe { self.as_extern().type_(idx) }
    }
    /// Returns the name of the type of the value at the given acceptable
    /// index.
    pub fn typename(&mut self, idx: i32) -> &'static str {
        #![inline(always)]
        unsafe { self.as_extern().typename(idx) }
    }
    /// Returns `true` if the two values in acceptable indices `index1` and
    /// `index2` are equal, following the semantics of the Lua == operator.
    /// Returns `false` if any indices are non-valid.
    pub fn equal(&mut self, index1: i32, index2: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().equal(index1, index2) }
    }
    /// Returns `true` if the two values in acceptable indices `index1` and
    /// `index2` are primitively equal (that is, without calling any
    /// metamethods). Returns `false` if any indices are non-valid.
    pub fn rawequal(&mut self, index1: i32, index2: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().rawequal(index1, index2) }
    }
    /// Returns `true` if the value at acceptable index `index1` is smaller
    /// than the value at acceptable index `index2`, following the semantics of
    /// the Lua < operator. Returns `false` if any indices are non-valid.
    pub fn lessthan(&mut self, index1: i32, index2: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().lessthan(index1, index2) }
    }
    /// Converts the Lua value at the given acceptable index to a f64. The Lua
    /// value must be a number or a string convertible to a number; otherwise,
    /// tonumber returns 0.
    pub fn tonumber(&mut self, idx: i32) -> f64 {
        #![inline(always)]
        unsafe { self.as_extern().tonumber(idx) }
    }
    /// Converts the Lua value at the given acceptable index to an isize. The Lua
    /// value must be a number or a string convertible to a number; otherwise,
    /// toint returns 0.
    pub fn tointeger(&mut self, idx: i32) -> isize {
        #![inline(always)]
        unsafe { self.as_extern().tointeger(idx) }
    }
    /// Converts the value at the given acceptable index to a bool.
    /// Returns false when called with a non-valid index.
    pub fn toboolean(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().toboolean(idx) }
    }
    /// Converts the value at the given acceptable index to a string.
    ///
    /// Returns None if the value is not a number or a string.
    /// Returns None if the string value is not utf-8.
    ///
    /// Note: if the value is a number, this method changes the value in the
    /// stack to a string. This may confuse lua_next if this is called during
    /// table traversal.
    pub fn tostring<'a>(&'a mut self, idx: i32) -> Option<&'a str> {
        #![inline(always)]
        unsafe { mem::transmute(self.as_extern().tostring(idx)) }
    }
    /// Converts the value at the given acceptable index into a lua string, and
    /// returns it as a byte vector.
    /// Returns None if the value is not a number or a string.
    /// See tostring() for caveats.
    pub fn tobytes<'a>(&'a mut self, idx: i32) -> Option<&'a [u8]> {
        #![inline(always)]
        unsafe { mem::transmute(self.as_extern().tobytes(idx)) }
    }
    /// Returns the "length" of the value at the given acceptable index.
    pub fn objlen(&mut self, idx: i32) -> usize {
        #![inline(always)]
        unsafe { self.as_extern().objlen(idx) }
    }
    /// Converts a value at the given acceptable index to a C function. The
    /// value must be a C function; otherwise, returns None.
    pub fn tocfunction(&mut self, idx: i32) -> Option<CFunction> {
        #![inline(always)]
        unsafe { self.as_extern().tocfunction(idx) }
    }
    /// If the value at the given acceptable index is a full userdata, returns
    /// its block address. If the value is a light userdata, returns its
    /// pointer. Otherwise, returns ptr::null().
    pub fn touserdata(&mut self, idx: i32) -> *mut libc::c_void {
        #![inline(always)]
        unsafe { self.as_extern().touserdata(idx) }
    }
    /// Converts the value at the given acceptable index to a Lua thread
    /// (represented as a State). This value must be a thread; otherwise, the
    /// method returns None.
    ///
    /// Note: the State return value does not make any assumptions about the
    /// available stack space. .checkstack() must be called in order to
    /// consider any non-valid index as acceptable.
    pub fn tothread(&mut self, idx: i32) -> Option<State> {
        #![inline(always)]
        unsafe { mem::transmute(self.as_extern().tothread(idx)) }
    }
    /// Converts the value at the given acceptable index to a pointer. The
    /// value can be a userdata, a table, a thread, or a function.
    pub fn topointer(&mut self, idx: i32) -> *const libc::c_void {
        #![inline(always)]
        unsafe { self.as_extern().topointer(idx) }
    }
    /// Pushes a nil value onto the stack.
    pub fn pushnil(&mut self) {
        #![inline(always)]
        unsafe { self.as_extern().pushnil() }
    }
    /// Pushes a number with value `n` onto the stack
    pub fn pushnumber(&mut self, n: f64) {
        #![inline(always)]
        unsafe { self.as_extern().pushnumber(n) }
    }
    /// Pushes a number with value `n` onto the stack.
    pub fn pushinteger(&mut self, n: isize) {
        #![inline(always)]
        unsafe { self.as_extern().pushinteger(n) }
    }
    /// Pushes a string onto the stack
    pub fn pushstring(&mut self, s: &str) {
        #![inline(always)]
        unsafe { self.as_extern().pushstring(s) }
    }
    /// Pushes a byte vector onto the stack as a lua string
    pub fn pushbytes(&mut self, bytes: &[u8]) {
        #![inline(always)]
        unsafe { self.as_extern().pushbytes(bytes) }
    }
    /// Pushes a new C closure onto the stack.
    ///
    /// When a C function is created, it is possible to associate some values
    /// with it, thus creating a C closure; these values are then accessible to
    /// the function whenever it is called. These values must be pushed onto
    /// the stack (in order), then pushclosure() is called to create and push
    /// the C closure onto the stack. The argument `n` is the number of values
    /// that should be associated with the function. These values are popped
    /// from the stack.
    ///
    /// `n` must be in the range [0, 255]. Anything outside this range will
    /// throw an error.
    pub fn pushcclosure(&mut self, f: CFunction, n: i32) {
        #![inline(always)]
        unsafe { self.as_extern().pushcclosure(f, n) }
    }
    /// Pushes a boolean value onto the stack.
    pub fn pushboolean(&mut self, b: bool) {
        #![inline(always)]
        unsafe { self.as_extern().pushboolean(b) }
    }
    /// Pushes a light userdata onto the stack.
    pub fn pushlightuserdata(&mut self, p: *mut libc::c_void) {
        #![inline(always)]
        unsafe { self.as_extern().pushlightuserdata(p) }
    }
    /// Pushes the thread represented by `self` onto the stack. Returns `true`
    /// if this thread is the main thread of the state.
    pub fn pushthread(&mut self) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().pushthread() }
    }
    /// Pushes onto the stack the value t[k], where t is the value at the given
    /// valid index and k is the value at the top of the stack. The key is
    /// popped from the stack.
    pub fn gettable(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().gettable(idx) }
    }
    /// Pushes onto the stack the value t[k], where t is the value at the given
    /// valid index. Fails the task if `k` has any interior NULs.
    pub fn getfield(&mut self, idx: i32, k: &str) {
        #![inline(always)]
        unsafe { self.as_extern().getfield(idx, k) }
    }
    /// Similar to gettable(), but does a raw access
    pub fn rawget(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().rawget(idx) }
    }
    /// Pushes onto the stack the value t[n], where t is the value at the given
    /// valid index. The access is raw; that is, it does not invoke
    /// metamethods.
    pub fn rawgeti(&mut self, idx: i32, n: i32) {
        #![inline(always)]
        unsafe { self.as_extern().rawgeti(idx, n) }
    }
    /// Creates a new empty table and pushes it onto the stack. The new table
    /// has space pre-allocated for `narr` array elements and `nrec` non-array
    /// elements.
    pub fn createtable(&mut self, narr: i32, nrec: i32) {
        #![inline(always)]
        unsafe { self.as_extern().createtable(narr, nrec) }
    }
    /// This method allocates a new block of memory with the given size, pushes
    /// onto the stack a new full userdata with the block address, and returns
    /// this address.
    pub fn newuserdata(&mut self, size: usize) -> *mut libc::c_void {
        #![inline(always)]
        unsafe { self.as_extern().newuserdata(size) }
    }
    /// Pushes onto the stack the metatable of the value at the given
    /// acceptable index. If the index is not valid, or the value does not have
    /// a metatable, the function returns `false` and pushes nothing onto the
    /// stack.
    pub fn getmetatable(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().getmetatable(idx) }
    }
    /// Pushes onto the stack the environment table of the value at the given
    /// index.
    pub fn getfenv(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().getfenv(idx) }
    }
    /// Does the equivalent of t[k] = v, where t is the value at the given
    /// valid index, v is the value at the top of the stack, and k is the value
    /// just below the top.
    ///
    /// This function pops both the key and the value from the stack.
    pub fn settable(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().settable(idx) }
    }
    /// Does the equivalent of t[k] = v, where t is the value at the given
    /// valid index and v is the value at the top of the stack.
    ///
    /// This function pops the value from the stack.
    ///
    /// Fails the task if `k` contains interior NULs.
    pub fn setfield(&mut self, idx: i32, k: &str) {
        #![inline(always)]
        unsafe { self.as_extern().setfield(idx, k) }
    }
    /// Similar to settable(), but does a raw assignment.
    pub fn rawset(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().rawset(idx) }
    }
    /// Does the equivalent of t[n] = v, where t is the value at the given
    /// valid index and v is the value at the top of the stack.
    ///
    /// This function pops the value from the stack. The assignment is raw;
    /// that is, it does not invoke metamethods.
    pub fn rawseti(&mut self, idx: i32, n: i32) {
        #![inline(always)]
        unsafe { self.as_extern().rawseti(idx, n) }
    }
    /// Pops a table from the stack and sets it as the new metatable for the
    /// value at the given acceptable index.
    pub fn setmetatable(&mut self, idx: i32) {
        #![inline(always)]
        unsafe { self.as_extern().setmetatable(idx) }
    }
    /// Pops a table from the stack and sets it as the new environment for the
    /// value at the given index. If the value at the given index is neither a
    /// function nor a thread nor a userdata, setfenv() returns `false`.
    /// Otherwise, returns `true`.
    pub fn setfenv(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().setfenv(idx) }
    }
    /// Calls a function.
    /// The function must be pushed first, followed by its arguments. `nargs`
    /// is the number of arguments. The function and its arguments are popped
    /// automatically.
    /// The function results are adjusted to `nresults`, unless `nresults` is
    /// `MULTRET`, in which case all function results are pushed.
    pub fn call(&mut self, nargs: i32, nresults: i32) {
        #![inline(always)]
        unsafe { self.as_extern().call(nargs, nresults) }
    }
    /// Calls a function in protected mode.
    ///
    /// If no error occurs, this behaves identically to call() and returns
    /// Ok(()). If there is any error, the error message is pushed onto the
    /// stack, and an error code is returned. The function and its arguments
    /// are always removed from the stack.
    ///
    /// If `errfunc` is 0, then the error message returned on the stack is
    /// exactly the original error message. Otherwise, `errfunc` is the stack
    /// index of an error handler function. It must not be a pseudo-index.
    pub fn pcall(&mut self, nargs: i32, nresults: i32, errfunc: i32) -> Result<(),PCallError> {
        #![inline(always)]
        unsafe { self.as_extern().pcall(nargs, nresults, errfunc) }
    }
    /// Loads a Lua chunk. If there are no errors, load() pushes the compiled
    /// chunk as a Lua function on top of the stack. Otherwise, it pushes an
    /// error message.
    ///
    /// This method only loads a chunk; it does not run it.
    ///
    /// load() automatically detects whether the chunk is text or binary, and
    /// loads it accordingly.
    ///
    /// The load() method uses a user-supplied `reader` function to read the
    /// chunk. The `data` argument is an opaque value passed to the reader
    /// function.
    ///
    /// The `chunkname` argument gives a name to the chunk, which is used for
    /// error messages and in debug information.
    ///
    /// Fails the task if `chunkname` contains interior NULs.
    pub fn load(&mut self, reader: Reader, data: *mut libc::c_void, chunkname: &str)
               -> Result<(),LoadError> {
        #![inline(always)]
        unsafe { self.as_extern().load(reader, data, chunkname) }
    }
    /// Dumps a function as a binary chunk. Receives a Lua function on the top
    /// of the stack and produces a binary chunk that, if loaded again, results
    /// in a function equivalent to the one dumped. As it produces parts of the
    /// chunk, dump() calls function `writer` with the given `data` to write
    /// them.
    ///
    /// The value returned is the error code returned by the last call to the
    /// writer; Ok(()) means no errors.
    ///
    /// This function does not pop the Lua function from the stack.
    pub fn dump(&mut self, writer: Writer, data: *mut libc::c_void) -> Result<(),i32> {
        #![inline(always)]
        unsafe { self.as_extern().dump(writer, data) }
    }
    /// Yields a coroutine.
    ///
    /// This function should only be called as the return expression of a C
    /// function, as follows:
    ///
    ///   return L.yield_(nresults);
    ///
    /// When a C function calls yield_() in that way, the running coroutine
    /// suspends its execution, and the call to resume() that started this
    /// coroutine returns. The parameter `nresults` is the number of values
    /// from the stack that are passed as the results to resume().
    pub fn yield_(&mut self, nresults: i32) -> c_int {
        #![inline(always)]
        unsafe { self.as_extern().yield_(nresults) }
    }
    /// Starts and resumes a coroutine in a given thread.
    ///
    /// To start a coroutine, you first create a new thread (see newthread());
    /// then you push onto its stack the main function plus any arguments; then
    /// you call resume(), with `narg` being the number of arguments. This call
    /// returns when the coroutine suspends or finishes its execution. When it
    /// returns, the stack contains all values passed to yield_(), or all
    /// values returned by the body function. resume() returns Ok(false) if the
    /// coroutine yields, Ok(true) if the coroutine finishes its execution
    /// without errors, or Err(PCallError) in case of errors. In case of
    /// errors, the stack is not unwound, so you can use the debug API over it.
    /// The error message is on top of the stack. To restart a coroutine, you
    /// put on its stack only the values to be passed as results from yield_(),
    /// and then call resume().
    pub fn resume(&mut self, narg: i32) -> Result<bool,PCallError> {
        #![inline(always)]
        unsafe { self.as_extern().resume(narg) }
    }
    /// Returns the status of the receiving thread.
    ///
    /// The status can be Ok(true) for a normal thread, Ok(false) if the thread
    /// is suspended, or Err(PCallError) if the thread finished its execution
    /// with an error.
    pub fn status(&mut self) -> Result<bool,PCallError> {
        #![inline(always)]
        unsafe { self.as_extern().status() }
    }
    /// Controls the garbage collector.
    ///
    /// This method performs several tasks, according to the value of the
    /// parameter `what`. See the `GC` enum for documentation on the various
    /// options.
    pub fn gc(&mut self, what: GC, data: i32) -> i32 {
        #![inline(always)]
        unsafe { self.as_extern().gc(what, data) }
    }
    /// Raises an error (using the value at the top of the stack)
    pub fn error(&mut self) -> ! {
        #![inline(always)]
        unsafe { self.as_extern().error() }
    }
    /// Pops a key from the stack, and pushes a key-value pair from the table
    /// at the given index (the "next" pair after the given key). If there are
    /// no more elements in the table, then next() returns false (and pushes
    /// nothing).
    ///
    /// A typical traversal looks like this:
    ///
    ///   /* table is in the stack at index 't' */
    ///   L.pushnil(); // first key
    ///   while L.next(t) {
    ///     /* uses 'key' (at index -2) and 'value' (at index -1) */
    ///     println!("{} - {}", L.typename(-2), L.typename(-1));
    ///     /* removes 'value'; keeps 'key' for next iteration */
    ///     L.pop(1);
    ///   }
    ///
    /// While traversing a table, do not call tostring() or tobytes() directly
    /// on a key, unless you know that the key is actually a string. Recall
    /// that tostring() changes the value at the given index; this confuses the
    /// next call to next().
    pub fn next(&mut self, idx: i32) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().next(idx) }
    }
    /// Concatenates the `n` values at the top of the stack, pops them, and
    /// leaves the result at the top.
    /// Errors if n is negative or larger than the stack top.
    pub fn concat(&mut self, n: i32) {
        #![inline(always)]
        unsafe { self.as_extern().concat(n) }
    }
    /// Returns the memory-allocation function of a given state. If `ud` is not
    /// NULL, Lua stores in `*ud` the opaque pointer passed to lua_newstate().
    ///
    /// Note: State::new() always provides NULL as the opaque pointer. It also
    /// provides a default alloc function that behaves identically to the one
    /// used by luaL_newstate().
    pub unsafe fn getallocf(&mut self, ud: *mut *mut libc::c_void) -> Alloc {
        #![inline(always)]
        self.as_extern().getallocf(ud)
    }
    /// Changes the allocator function of a given state to `f` with user data
    /// `ud`.
    pub unsafe fn setallocf(&mut self, f: Alloc, ud: *mut libc::c_void) {
        #![inline(always)]
        self.as_extern().setallocf(f, ud)
    }
    /// Pop n elements from the stack.
    /// Errors if the stack is smaller than n
    pub fn pop(&mut self, n: i32) {
        #![inline(always)]
        unsafe { self.as_extern().pop(n) }
    }
    /// Creates a new empty table and pushes it onto the stack.
    /// It is equivalent to .createtable(0, 0).
    pub fn newtable(&mut self) {
        #![inline(always)]
        unsafe { self.as_extern().newtable() }
    }
    /// Sets the C function `f` as the new value of global `name`.
    /// Fails the task if `name` has interior NULs.
    pub fn register(&mut self, name: &str, f: CFunction) {
        #![inline(always)]
        unsafe { self.as_extern().register(name, f) }
    }
/// Pushes a C function onto the stack.
pub fn pushcfunction(&mut self, f: CFunction) {
#![inline(always)]
unsafe { self.as_extern().pushcfunction(f) }
}
/// Returns `true` if the value at the given acceptable index is a function
/// (either C or Lua).
pub fn isfunction(&mut self, idx: i32) -> bool {
#![inline(always)]
unsafe { self.as_extern().isfunction(idx) }
}
/// Returns `true` if the value at the given acceptable index is a table.
pub fn istable(&mut self, idx: i32) -> bool {
#![inline(always)]
unsafe { self.as_extern().istable(idx) }
}
/// Returns `true` if the value at the given acceptable index is a light
/// userdata.
pub fn islightuserdata(&mut self, idx: i32) -> bool {
#![inline(always)]
unsafe { self.as_extern().islightuserdata(idx) }
}
/// Returns `true` if the value at the given acceptable index is `nil`.
pub fn isnil(&mut self, idx: i32) -> bool {
#![inline(always)]
unsafe { self.as_extern().isnil(idx) }
}
/// Returns `true` if the value at the given acceptable index has type
/// boolean.
pub fn isboolean(&mut self, idx: i32) -> bool {
#![inline(always)]
unsafe { self.as_extern().isboolean(idx) }
}
/// Returns `true` if the value at the given acceptable index is a thread.
pub fn isthread(&mut self, idx: i32) -> bool {
#![inline(always)]
unsafe { self.as_extern().isthread(idx) }
}
/// Returns `true` if the given acceptable index is not valid.
pub fn isnone(&mut self, idx: i32) -> bool {
#![inline(always)]
unsafe { self.as_extern().isnone(idx) }
}
/// Returns `true` if the given acceptable index is not valid or if the
/// value at this index is nil.
pub fn isnoneornil(&mut self, idx: i32) -> bool {
#![inline(always)]
unsafe { self.as_extern().isnoneornil(idx) }
}
/// Pops a value from the stack and sets it as the new value of global
/// `name`. Fails the task if `name` has interior NULs.
pub fn setglobal(&mut self, name: &str) {
#![inline(always)]
unsafe { self.as_extern().setglobal(name) }
}
/// Pushes onto the stack the value of the global `name`.
/// Fails the task if `name` has interior NULs.
pub fn getglobal(&mut self, name: &str) {
#![inline(always)]
unsafe { self.as_extern().getglobal(name) }
}
}
// Checked middle layer: every method first validates its arguments (index
// acceptability/validity, available stack space, stack depth) via luaassert!
// and checkstack_, then delegates to the RawState method of the same name.
// See the corresponding State methods for the public API contracts.
#[allow(missing_docs)]
impl<'l> ExternState<'l> {
    pub unsafe fn newthread(&mut self) -> State {
        self.as_raw().newthread()
    }
    pub unsafe fn atpanic(&mut self, panicf: CFunction) -> CFunction {
        self.as_raw().atpanic(panicf)
    }
    // An "acceptable" index is any position within the space allocated for
    // the stack (not just positions holding live values), or a pseudo-index.
    unsafe fn check_acceptable(&mut self, idx: i32) {
        if idx > 0 {
            luaassert!(self, idx <= self.stackspace,
                       "index {} is not acceptable (stack space is {})", idx, self.stackspace);
        } else if idx < 0 {
            self.check_valid(idx, true);
        } else {
            self.errorstr("index 0 is not acceptable");
        }
    }
    // A "valid" index refers to a live stack slot; pseudo-indices
    // (globals/registry/environ/upvalues) are only permitted when
    // `allowpseudo` is true.
    unsafe fn check_valid(&mut self, idx: i32, allowpseudo: bool) {
        match idx {
            0 => self.errorstr("index 0 is not valid"),
            GLOBALSINDEX |
            REGISTRYINDEX |
            ENVIRONINDEX => luaassert!(self, allowpseudo,
                                       "Pseudo-indices are not valid for this call"),
            _ if idx < GLOBALSINDEX => {
                luaassert!(self, allowpseudo, "Pseudo-indices are not valid for this call");
                // we can't actually test for upvalue validity
                // at least not without using lua_Debug, which seems excessive.
                // However, I think that invalid but acceptable upvalues are treated as nil
                let upvalidx = GLOBALSINDEX - idx;
                luaassert!(self, upvalidx <= 256, "upvalue index {} is out of range", upvalidx);
            }
            _ => {
                let top = self.gettop();
                luaassert!(self, idx.abs() <= top, "index {} is not valid (stack top is {})", idx,
                           top);
            }
        }
    }
    pub unsafe fn describe(&mut self, idx: i32) -> String {
        self.check_acceptable(idx);
        self.checkstack_(1);
        self.as_raw().describe(idx)
    }
    pub unsafe fn describe_(&mut self, idx: i32, usestack: bool) -> String {
        self.check_acceptable(idx);
        if usestack { self.checkstack_(1); }
        self.as_raw().describe_(idx, usestack)
    }
    pub fn gettop(&mut self) -> i32 {
        self.as_raw().gettop()
    }
    pub unsafe fn settop(&mut self, idx: i32) {
        if idx != 0 { self.check_acceptable(idx); }
        self.as_raw().settop(idx);
    }
    pub unsafe fn pushvalue(&mut self, idx: i32) {
        self.check_valid(idx, true);
        self.checkstack_(1);
        self.as_raw().pushvalue(idx)
    }
    pub unsafe fn remove(&mut self, idx: i32) {
        self.check_valid(idx, false);
        self.as_raw().remove(idx)
    }
    pub unsafe fn insert(&mut self, idx: i32) {
        self.check_valid(idx, false);
        self.as_raw().insert(idx)
    }
    pub unsafe fn replace(&mut self, idx: i32) {
        self.check_valid(idx, true);
        self.as_raw().replace(idx)
    }
    pub unsafe fn checkstack(&mut self, extra: i32) -> bool {
        self.as_raw().checkstack(extra)
    }
    pub unsafe fn checkstack_(&mut self, extra: i32) {
        self.as_raw().checkstack_(extra)
    }
    pub unsafe fn xmove(&mut self, to: &mut ExternState, n: i32) {
        luaassert!(self, self.gettop() >= n, "xmove: stack underflow");
        // The destination receives all `n` moved values, so it needs `n`
        // free slots, not just one.
        to.checkstack_(n);
        self.as_raw().xmove(to.as_raw(), n)
    }
    pub unsafe fn isnumber(&mut self, idx: i32) -> bool {
        self.check_acceptable(idx);
        self.as_raw().isnumber(idx)
    }
    pub unsafe fn isstring(&mut self, idx: i32) -> bool {
        self.check_acceptable(idx);
        self.as_raw().isstring(idx)
    }
    pub unsafe fn iscfunction(&mut self, idx: i32) -> bool {
        self.check_acceptable(idx);
        self.as_raw().iscfunction(idx)
    }
    pub unsafe fn isuserdata(&mut self, idx: i32) -> bool {
        self.check_acceptable(idx);
        self.as_raw().isuserdata(idx)
    }
    pub unsafe fn type_(&mut self, idx: i32) -> Option<Type> {
        self.check_acceptable(idx);
        self.as_raw().type_(idx)
    }
    pub unsafe fn typename(&mut self, idx: i32) -> &'static str {
        self.check_acceptable(idx);
        self.as_raw().typename(idx)
    }
    pub unsafe fn equal(&mut self, index1: i32, index2: i32) -> bool {
        self.check_acceptable(index1);
        self.check_acceptable(index2);
        self.as_raw().equal(index1, index2)
    }
    pub unsafe fn rawequal(&mut self, index1: i32, index2: i32) -> bool {
        self.check_acceptable(index1);
        self.check_acceptable(index2);
        self.as_raw().rawequal(index1, index2)
    }
    pub unsafe fn lessthan(&mut self, index1: i32, index2: i32) -> bool {
        self.check_acceptable(index1);
        self.check_acceptable(index2);
        self.as_raw().lessthan(index1, index2)
    }
    pub unsafe fn tonumber(&mut self, idx: i32) -> f64 {
        self.check_acceptable(idx);
        self.as_raw().tonumber(idx)
    }
    pub unsafe fn tointeger(&mut self, idx: i32) -> isize {
        self.check_acceptable(idx);
        self.as_raw().tointeger(idx)
    }
    pub unsafe fn toboolean(&mut self, idx: i32) -> bool {
        self.check_acceptable(idx);
        self.as_raw().toboolean(idx)
    }
    /// Note: the string is returned as 'static to prevent borrowing the
    /// ExternState, but its lifetime is actually that of the value on the
    /// stack.
    pub unsafe fn tostring(&mut self, idx: i32) -> Option<&'static str> {
        self.check_acceptable(idx);
        self.as_raw().tostring(idx)
    }
    /// Note: the byte vector is returned as 'static to prevent borrowing the
    /// ExternState, but its lifetime is actually that of the value on the
    /// stack.
    pub unsafe fn tobytes(&mut self, idx: i32) -> Option<&'static [u8]> {
        self.check_acceptable(idx);
        self.as_raw().tobytes(idx)
    }
    pub unsafe fn objlen(&mut self, idx: i32) -> usize {
        self.check_acceptable(idx);
        self.as_raw().objlen(idx)
    }
    pub unsafe fn tocfunction(&mut self, idx: i32) -> Option<CFunction> {
        self.check_acceptable(idx);
        self.as_raw().tocfunction(idx)
    }
    pub unsafe fn touserdata(&mut self, idx: i32) -> *mut libc::c_void {
        self.check_acceptable(idx);
        self.as_raw().touserdata(idx)
    }
    pub unsafe fn tothread(&mut self, idx: i32) -> Option<ExternState> {
        self.check_acceptable(idx);
        self.as_raw().tothread(idx)
    }
    pub unsafe fn topointer(&mut self, idx: i32) -> *const libc::c_void {
        self.check_acceptable(idx);
        self.as_raw().topointer(idx)
    }
    pub unsafe fn pushnil(&mut self) {
        self.checkstack_(1);
        self.as_raw().pushnil()
    }
    pub unsafe fn pushnumber(&mut self, n: f64) {
        self.checkstack_(1);
        self.as_raw().pushnumber(n)
    }
    pub unsafe fn pushinteger(&mut self, n: isize) {
        self.checkstack_(1);
        self.as_raw().pushinteger(n)
    }
    pub unsafe fn pushstring(&mut self, s: &str) {
        self.checkstack_(1);
        self.as_raw().pushstring(s)
    }
    pub unsafe fn pushbytes(&mut self, bytes: &[u8]) {
        self.checkstack_(1);
        self.as_raw().pushbytes(bytes)
    }
    pub unsafe fn pushcclosure(&mut self, f: CFunction, n: i32) {
        // With n > 0 the closure replaces its n upvalues on the stack, so no
        // extra room is needed; with n == 0 one new slot is pushed.
        if n == 0 {
            self.checkstack_(1);
        } else {
            luaassert!(self, n >= 0 && n <= 255, "pushcclosure: invalid argument n");
        }
        self.as_raw().pushcclosure(f, n)
    }
    pub unsafe fn pushboolean(&mut self, b: bool) {
        self.checkstack_(1);
        self.as_raw().pushboolean(b)
    }
    pub unsafe fn pushlightuserdata(&mut self, p: *mut libc::c_void) {
        self.checkstack_(1);
        self.as_raw().pushlightuserdata(p)
    }
    pub unsafe fn pushthread(&mut self) -> bool {
        self.checkstack_(1);
        self.as_raw().pushthread()
    }
    pub unsafe fn gettable(&mut self, idx: i32) {
        self.check_valid(idx, true);
        luaassert!(self, self.gettop() > 0, "gettable: stack underflow");
        self.as_raw().gettable(idx)
    }
    pub unsafe fn getfield(&mut self, idx: i32, k: &str) {
        self.check_valid(idx, true);
        self.checkstack_(1);
        self.as_raw().getfield(idx, k)
    }
    pub unsafe fn rawget(&mut self, idx: i32) {
        self.check_valid(idx, true);
        luaassert!(self, self.gettop() > 0, "rawget: stack underflow");
        self.as_raw().rawget(idx)
    }
    pub unsafe fn rawgeti(&mut self, idx: i32, n: i32) {
        self.check_valid(idx, true);
        self.checkstack_(1);
        self.as_raw().rawgeti(idx, n)
    }
    pub unsafe fn createtable(&mut self, narr: i32, nrec: i32) {
        self.checkstack_(1);
        self.as_raw().createtable(narr, nrec)
    }
    pub unsafe fn newuserdata(&mut self, size: usize) -> *mut libc::c_void {
        self.checkstack_(1);
        self.as_raw().newuserdata(size)
    }
    pub unsafe fn getmetatable(&mut self, idx: i32) -> bool {
        self.check_acceptable(idx);
        self.checkstack_(1);
        self.as_raw().getmetatable(idx)
    }
    pub unsafe fn getfenv(&mut self, idx: i32) {
        self.check_acceptable(idx);
        self.checkstack_(1);
        self.as_raw().getfenv(idx)
    }
    pub unsafe fn settable(&mut self, idx: i32) {
        self.check_valid(idx, true);
        luaassert!(self, self.gettop() >= 2, "settable: stack underflow");
        self.as_raw().settable(idx)
    }
    pub unsafe fn setfield(&mut self, idx: i32, k: &str) {
        self.check_valid(idx, true);
        luaassert!(self, self.gettop() >= 1, "setfield: stack underflow");
        self.as_raw().setfield(idx, k)
    }
    pub unsafe fn rawset(&mut self, idx: i32) {
        self.check_valid(idx, true);
        luaassert!(self, self.gettop() >= 2, "rawset: stack underflow");
        self.as_raw().rawset(idx)
    }
    pub unsafe fn rawseti(&mut self, idx: i32, n: i32) {
        self.check_valid(idx, true);
        self.as_raw().rawseti(idx, n)
    }
    pub unsafe fn setmetatable(&mut self, idx: i32) {
        self.check_acceptable(idx);
        luaassert!(self, self.istable(-1), "setmetatable: top stack value must be a table");
        self.as_raw().setmetatable(idx)
    }
    pub unsafe fn setfenv(&mut self, idx: i32) -> bool {
        self.check_acceptable(idx);
        luaassert!(self, self.istable(-1), "setfenv: top stack value must be a table");
        self.as_raw().setfenv(idx)
    }
    pub unsafe fn call(&mut self, nargs: i32, nresults: i32) {
        luaassert!(self, nargs >= 0, "call: invalid nargs");
        luaassert!(self, nresults == MULTRET || nresults >= 0, "call: invalid nresults");
        luaassert!(self, self.gettop() > nargs, "call: stack underflow");
        // The call pops the function plus its `nargs` arguments and pushes
        // `nresults` results, so extra room is only needed for the net
        // growth of nresults - (nargs + 1) slots. (The previous expression
        // `nargs - nresults - 1` was negative in this branch, which made the
        // check a no-op.)
        if nresults > nargs + 1 { self.checkstack_(nresults - nargs - 1) }
        self.as_raw().call(nargs, nresults)
    }
    pub unsafe fn pcall(&mut self, nargs: i32, nresults: i32, errfunc: i32)
                        -> Result<(),PCallError> {
        luaassert!(self, nargs >= 0, "pcall: invalid nargs");
        luaassert!(self, nresults == MULTRET || nresults >= 0, "pcall: invalid nresults");
        luaassert!(self, self.gettop() > nargs, "pcall: stack underflow");
        if errfunc != 0 {
            self.check_valid(errfunc, false)
        }
        // See call(): room is needed for the net stack growth of
        // nresults - (nargs + 1) slots.
        if nresults > nargs + 1 { self.checkstack_(nresults - nargs - 1) }
        self.as_raw().pcall(nargs, nresults, errfunc)
    }
    pub unsafe fn load(&mut self, reader: Reader, data: *mut libc::c_void, chunkname: &str)
                      -> Result<(),LoadError> {
        self.checkstack_(1);
        self.as_raw().load(reader, data, chunkname)
    }
    pub unsafe fn dump(&mut self, writer: Writer, data: *mut libc::c_void) -> Result<(),i32> {
        luaassert!(self, self.gettop() >= 1, "dump: stack underflow");
        self.as_raw().dump(writer, data)
    }
    pub unsafe fn yield_(&mut self, nresults: i32) -> c_int {
        luaassert!(self, self.gettop() >= nresults, "yield: stack underflow");
        self.as_raw().yield_(nresults)
    }
    pub unsafe fn resume(&mut self, narg: i32) -> Result<bool,PCallError> {
        // NOTE(review): this demands narg+1 values (function + args), which
        // is right for the initial resume; when restarting a yielded
        // coroutine only the narg values are on the stack — confirm whether
        // this assert is too strict in that case.
        luaassert!(self, self.gettop() > narg, "resume: stack underflow");
        self.as_raw().resume(narg)
    }
    pub unsafe fn status(&mut self) -> Result<bool,PCallError> {
        self.as_raw().status()
    }
    pub unsafe fn gc(&mut self, what: GC, data: i32) -> i32 {
        self.as_raw().gc(what, data)
    }
    pub unsafe fn error(&mut self) -> ! {
        luaassert!(self, self.gettop() > 0, "error: stack underflow");
        self.as_raw().error()
    }
    pub unsafe fn next(&mut self, idx: i32) -> bool {
        self.check_valid(idx, true);
        self.as_raw().next(idx)
    }
    pub unsafe fn concat(&mut self, n: i32) {
        luaassert!(self, n >= 0, "concat: invalid argument n");
        luaassert!(self, n <= self.gettop(), "concat: stack underflow");
        // concat(0) pushes an empty string, so one extra slot is needed then.
        if n == 0 { self.checkstack_(1) }
        self.as_raw().concat(n)
    }
    pub unsafe fn getallocf(&mut self, ud: *mut *mut libc::c_void) -> Alloc {
        self.as_raw().getallocf(ud)
    }
    pub unsafe fn setallocf(&mut self, f: Alloc, ud: *mut libc::c_void) {
        self.as_raw().setallocf(f, ud)
    }
    pub unsafe fn pop(&mut self, n: i32) {
        // lua_pop(n) is lua_settop(-n-1); a negative n therefore sets the
        // absolute top to (n+1).abs(), which must not exceed the current top.
        if n >= 0 {
            luaassert!(self, self.gettop() >= n, "pop: stack underflow");
        } else {
            luaassert!(self, self.gettop() >= (n+1).abs(), "pop: stack underflow");
        }
        self.as_raw().pop(n)
    }
    pub unsafe fn newtable(&mut self) {
        self.checkstack_(1);
        self.as_raw().newtable()
    }
    pub unsafe fn register(&mut self, name: &str, f: CFunction) {
        self.checkstack_(1);
        self.as_raw().register(name, f)
    }
    pub unsafe fn pushcfunction(&mut self, f: CFunction) {
        self.checkstack_(1);
        self.as_raw().pushcfunction(f)
    }
    pub unsafe fn isfunction(&mut self, idx: i32) -> bool {
        self.check_acceptable(idx);
        self.as_raw().isfunction(idx)
    }
    pub unsafe fn istable(&mut self, idx: i32) -> bool {
        self.check_acceptable(idx);
        self.as_raw().istable(idx)
    }
    pub unsafe fn islightuserdata(&mut self, idx: i32) -> bool {
        self.check_acceptable(idx);
        self.as_raw().islightuserdata(idx)
    }
    pub unsafe fn isnil(&mut self, idx: i32) -> bool {
        self.check_acceptable(idx);
        self.as_raw().isnil(idx)
    }
    pub unsafe fn isboolean(&mut self, idx: i32) -> bool {
        self.check_acceptable(idx);
        self.as_raw().isboolean(idx)
    }
    pub unsafe fn isthread(&mut self, idx: i32) -> bool {
        self.check_acceptable(idx);
        self.as_raw().isthread(idx)
    }
    pub unsafe fn isnone(&mut self, idx: i32) -> bool {
        self.check_acceptable(idx);
        self.as_raw().isnone(idx)
    }
    pub unsafe fn isnoneornil(&mut self, idx: i32) -> bool {
        self.check_acceptable(idx);
        self.as_raw().isnoneornil(idx)
    }
    pub unsafe fn setglobal(&mut self, name: &str) {
        luaassert!(self, self.gettop() > 0, "setglobal: stack underflow");
        self.as_raw().setglobal(name)
    }
    pub unsafe fn getglobal(&mut self, name: &str) {
        self.checkstack_(1);
        self.as_raw().getglobal(name)
    }
}
// Lowest layer: direct, unchecked wrappers around the C API in `raw`.
// No argument validation happens here; the checked ExternState layer is
// responsible for index and stack-space checks before delegating down.
#[allow(missing_docs)]
impl<'l> RawState<'l> {
    pub unsafe fn newthread(&mut self) -> State {
        #![inline]
        mem::transmute(ExternState::from_lua_State(raw::lua_newthread(self.L)))
    }
    pub unsafe fn atpanic(&mut self, panicf: CFunction) -> CFunction {
        #![inline]
        raw::lua_atpanic(self.L, panicf)
    }
    pub unsafe fn describe(&mut self, idx: i32) -> String {
        self.describe_(idx, true)
    }
    // Renders a human-readable description of the value at `idx`. With
    // `usestack` set, numbers are copied before tostring() so the original
    // slot is not mutated in place.
    pub unsafe fn describe_(&mut self, idx: i32, usestack: bool) -> String {
        match self.type_(idx) {
            None => "".to_string(),
            Some(typ) => match typ {
                Type::Nil => "nil".to_string(),
                Type::Boolean => if self.toboolean(idx) { "true".to_string() }
                                 else { "false".to_string() },
                Type::Number => {
                    // Let Lua create the string instead of us
                    if usestack { self.pushvalue(idx); } // copy the value
                    let s = self.tostring(-1).map(|s| s.to_string());
                    if usestack { self.pop(1); } // remove the copied value
                    s.unwrap_or_default() // default is the empty string
                }
                Type::String => {
                    self.tostring(idx).unwrap_or("<invalid utf8>").to_string()
                }
                Type::LightUserdata |
                Type::Userdata |
                Type::Table |
                Type::Thread |
                Type::Function => {
                    let s = self.typename(idx);
                    let p = self.topointer(idx);
                    format!("<{} {:p}>", s, p)
                }
            }
        }
    }
    pub fn gettop(&mut self) -> i32 {
        #![inline]
        unsafe { raw::lua_gettop(self.L) as i32 }
    }
    pub unsafe fn settop(&mut self, idx: i32) {
        #![inline]
        raw::lua_settop(self.L, idx as c_int)
    }
    pub unsafe fn pushvalue(&mut self, idx: i32) {
        #![inline]
        raw::lua_pushvalue(self.L, idx as c_int)
    }
    pub unsafe fn remove(&mut self, idx: i32) {
        #![inline]
        raw::lua_remove(self.L, idx as c_int)
    }
    pub unsafe fn insert(&mut self, idx: i32) {
        #![inline]
        raw::lua_insert(self.L, idx as c_int)
    }
    pub unsafe fn replace(&mut self, idx: i32) {
        #![inline]
        raw::lua_replace(self.L, idx as c_int)
    }
    // Grows the stack if needed and tracks the known stack space so the
    // checked layer can validate positive indices cheaply.
    pub unsafe fn checkstack(&mut self, extra: i32) -> bool {
        #![inline]
        let top = self.gettop();
        if top + extra > self.stackspace {
            if raw::lua_checkstack(self.L, extra as c_int) != 0 {
                self.stackspace = top + extra;
                true
            } else {
                false
            }
        } else {
            true
        }
    }
    pub unsafe fn checkstack_(&mut self, extra: i32) {
        #![inline]
        luaassert!(self, self.checkstack(extra), "checkstack: cannot grow stack")
    }
    pub unsafe fn xmove(&mut self, to: &mut RawState, n: i32) {
        #![inline]
        raw::lua_xmove(self.L, to.L, n as c_int)
    }
    pub unsafe fn isnumber(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_isnumber(self.L, idx as c_int) != 0
    }
    pub unsafe fn isstring(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_isstring(self.L, idx as c_int) != 0
    }
    pub unsafe fn iscfunction(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_iscfunction(self.L, idx as c_int) != 0
    }
    pub unsafe fn isuserdata(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_isuserdata(self.L, idx as c_int) != 0
    }
    pub unsafe fn type_(&mut self, idx: i32) -> Option<Type> {
        #![inline]
        match raw::lua_type(self.L, idx as c_int) {
            raw::LUA_TNONE => None,
            raw::LUA_TNIL => Some(Type::Nil),
            raw::LUA_TBOOLEAN => Some(Type::Boolean),
            raw::LUA_TLIGHTUSERDATA => Some(Type::LightUserdata),
            raw::LUA_TNUMBER => Some(Type::Number),
            raw::LUA_TSTRING => Some(Type::String),
            raw::LUA_TTABLE => Some(Type::Table),
            raw::LUA_TFUNCTION => Some(Type::Function),
            raw::LUA_TUSERDATA => Some(Type::Userdata),
            raw::LUA_TTHREAD => Some(Type::Thread),
            _ => self.errorstr("type: Unknown return value from lua_type")
        }
    }
    pub unsafe fn typename(&mut self, idx: i32) -> &'static str {
        #![inline]
        // The names returned by luaL_typename are static C strings, so
        // extending the borrow to 'static is sound.
        let cs = CStr::from_ptr(aux::raw::luaL_typename(self.L, idx as c_int));
        mem::transmute::<&str, &'static str>(str::from_utf8(cs.to_bytes()).unwrap())
    }
    pub unsafe fn equal(&mut self, index1: i32, index2: i32) -> bool {
        #![inline]
        raw::lua_equal(self.L, index1 as c_int, index2 as c_int) != 0
    }
    pub unsafe fn rawequal(&mut self, index1: i32, index2: i32) -> bool {
        #![inline]
        raw::lua_rawequal(self.L, index1 as c_int, index2 as c_int) != 0
    }
    pub unsafe fn lessthan(&mut self, index1: i32, index2: i32) -> bool {
        #![inline]
        raw::lua_lessthan(self.L, index1 as c_int, index2 as c_int) != 0
    }
    pub unsafe fn tonumber(&mut self, idx: i32) -> f64 {
        #![inline]
        raw::lua_tonumber(self.L, idx as c_int) as f64
    }
    pub unsafe fn tointeger(&mut self, idx: i32) -> isize {
        #![inline]
        raw::lua_tointeger(self.L, idx as c_int) as isize
    }
    pub unsafe fn toboolean(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_toboolean(self.L, idx as c_int) != 0
    }
    /// Note: the string is returned as 'static to prevent borrowing the
    /// RawState, but its lifetime is actually that of the value on the stack.
    // TODO: change return type to use core::str::Utf8Error
    pub unsafe fn tostring(&mut self, idx: i32) -> Option<&'static str> {
        #![inline]
        self.tobytes(idx).and_then(|v| str::from_utf8(v).ok())
    }
    /// Note: the byte vector is returned as 'static to prevent borrowing the
    /// RawState, but its lifetime is actually that of the value on the stack.
    pub unsafe fn tobytes(&mut self, idx: i32) -> Option<&'static [u8]> {
        #![inline]
        let mut sz: libc::size_t = 0;
        // cast added for consistency with the other wrappers
        let s = raw::lua_tolstring(self.L, idx as c_int, &mut sz);
        if s.is_null() {
            None
        } else {
            let buf = s as *const u8;
            Some(slice::from_raw_parts(buf, sz as usize))
        }
    }
    pub unsafe fn objlen(&mut self, idx: i32) -> usize {
        #![inline]
        raw::lua_objlen(self.L, idx as c_int) as usize
    }
    pub unsafe fn tocfunction(&mut self, idx: i32) -> Option<CFunction> {
        #![inline]
        raw::lua_tocfunction(self.L, idx as c_int)
    }
    pub unsafe fn touserdata(&mut self, idx: i32) -> *mut libc::c_void {
        #![inline]
        raw::lua_touserdata(self.L, idx as c_int)
    }
    pub unsafe fn tothread(&mut self, idx: i32) -> Option<ExternState> {
        #![inline]
        let s = raw::lua_tothread(self.L, idx as c_int);
        if s.is_null() {
            None
        } else {
            Some(ExternState { L: s, stackspace: 0, _marker: marker::PhantomData })
        }
    }
    pub unsafe fn topointer(&mut self, idx: i32) -> *const libc::c_void {
        #![inline]
        raw::lua_topointer(self.L, idx as c_int)
    }
    pub unsafe fn pushnil(&mut self) {
        #![inline]
        raw::lua_pushnil(self.L)
    }
    pub unsafe fn pushnumber(&mut self, n: f64) {
        #![inline]
        raw::lua_pushnumber(self.L, n as raw::lua_Number)
    }
    pub unsafe fn pushinteger(&mut self, n: isize) {
        #![inline]
        raw::lua_pushinteger(self.L, n as raw::lua_Integer)
    }
    pub unsafe fn pushstring(&mut self, s: &str) {
        #![inline]
        raw::lua_pushlstring(self.L, s.as_ptr() as *const libc::c_char, s.len() as libc::size_t)
    }
    pub unsafe fn pushbytes(&mut self, bytes: &[u8]) {
        #![inline]
        raw::lua_pushlstring(self.L, bytes.as_ptr() as *const libc::c_char, bytes.len() as libc::size_t)
    }
    pub unsafe fn pushcclosure(&mut self, f: CFunction, n: i32) {
        #![inline]
        raw::lua_pushcclosure(self.L, f, n as c_int)
    }
    pub unsafe fn pushboolean(&mut self, b: bool) {
        #![inline]
        raw::lua_pushboolean(self.L, b as c_int)
    }
    pub unsafe fn pushlightuserdata(&mut self, p: *mut libc::c_void) {
        #![inline]
        raw::lua_pushlightuserdata(self.L, p)
    }
    pub unsafe fn pushthread(&mut self) -> bool {
        #![inline]
        raw::lua_pushthread(self.L) != 0
    }
    pub unsafe fn gettable(&mut self, idx: i32) {
        #![inline]
        raw::lua_gettable(self.L, idx as c_int)
    }
    pub unsafe fn getfield(&mut self, idx: i32, k: &str) {
        #![inline]
        raw::lua_getfield(self.L, idx as c_int, CString::new(k).unwrap().as_ptr())
    }
    pub unsafe fn rawget(&mut self, idx: i32) {
        #![inline]
        raw::lua_rawget(self.L, idx as c_int)
    }
    pub unsafe fn rawgeti(&mut self, idx: i32, n: i32) {
        #![inline]
        raw::lua_rawgeti(self.L, idx as c_int, n as c_int)
    }
    pub unsafe fn createtable(&mut self, narr: i32, nrec: i32) {
        #![inline]
        raw::lua_createtable(self.L, narr as c_int, nrec as c_int)
    }
    pub unsafe fn newuserdata(&mut self, size: usize) -> *mut libc::c_void {
        #![inline]
        raw::lua_newuserdata(self.L, size as libc::size_t)
    }
    pub unsafe fn getmetatable(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_getmetatable(self.L, idx as c_int) != 0
    }
    pub unsafe fn getfenv(&mut self, idx: i32) {
        #![inline]
        raw::lua_getfenv(self.L, idx as c_int)
    }
    pub unsafe fn settable(&mut self, idx: i32) {
        #![inline]
        raw::lua_settable(self.L, idx as c_int)
    }
    pub unsafe fn setfield(&mut self, idx: i32, k: &str) {
        #![inline]
        raw::lua_setfield(self.L, idx as c_int, CString::new(k).unwrap().as_ptr())
    }
    pub unsafe fn rawset(&mut self, idx: i32) {
        #![inline]
        raw::lua_rawset(self.L, idx as c_int)
    }
    pub unsafe fn rawseti(&mut self, idx: i32, n: i32) {
        #![inline]
        raw::lua_rawseti(self.L, idx as c_int, n as c_int)
    }
    pub unsafe fn setmetatable(&mut self, idx: i32) {
        #![inline]
        // ignore return value of lua_setmetatable(), it appears to always be 1
        raw::lua_setmetatable(self.L, idx as c_int);
    }
    pub unsafe fn setfenv(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_setfenv(self.L, idx as c_int) != 0
    }
    pub unsafe fn call(&mut self, nargs: i32, nresults: i32) {
        #![inline]
        raw::lua_call(self.L, nargs as c_int, nresults as c_int)
    }
    pub unsafe fn pcall(&mut self, nargs: i32, nresults: i32, errfunc: i32)
                       -> Result<(),PCallError> {
        match raw::lua_pcall(self.L, nargs as c_int, nresults as c_int, errfunc as c_int) {
            0 => Ok(()),
            i => Err(PCallError::from_code(i).unwrap_or_else(|| {
                self.errorstr("pcall: unexpected error from lua_pcall")
            }))
        }
    }
    pub unsafe fn load(&mut self, reader: Reader, data: *mut libc::c_void, chunkname: &str)
                      -> Result<(),LoadError> {
        let cstr = CString::new(chunkname).unwrap();
        match raw::lua_load(self.L, reader, data, cstr.as_ptr()) {
            0 => Ok(()),
            raw::LUA_ERRSYNTAX => Err(LoadError::ErrSyntax),
            raw::LUA_ERRMEM => Err(LoadError::ErrMem),
            _ => self.errorstr("load: unexpected error from lua_load")
        }
    }
    pub unsafe fn dump(&mut self, writer: Writer, data: *mut libc::c_void) -> Result<(),i32> {
        #![inline]
        match raw::lua_dump(self.L, writer, data) {
            0 => Ok(()),
            i => Err(i)
        }
    }
    pub unsafe fn yield_(&mut self, nresults: i32) -> c_int {
        #![inline]
        raw::lua_yield(self.L, nresults as c_int)
    }
    pub unsafe fn resume(&mut self, narg: i32) -> Result<bool,PCallError> {
        #![inline]
        match raw::lua_resume(self.L, narg as c_int) {
            raw::LUA_YIELD => Ok(false),
            0 => Ok(true),
            i => Err(PCallError::from_code(i).unwrap_or_else(|| {
                self.errorstr("resume: unexpected error from lua_resume")
            }))
        }
    }
    pub unsafe fn status(&mut self) -> Result<bool,PCallError> {
        #![inline]
        match raw::lua_status(self.L) {
            raw::LUA_YIELD => Ok(false),
            0 => Ok(true),
            i => Err(PCallError::from_code(i).unwrap_or_else(|| {
                self.errorstr("status: unexpected error from lua_status")
            }))
        }
    }
    pub unsafe fn gc(&mut self, what: GC, data: i32) -> i32 {
        #![inline]
        raw::lua_gc(self.L, what as c_int, data as c_int) as i32
    }
    pub unsafe fn error(&mut self) -> ! {
        #![inline]
        raw::lua_error(self.L);
        unreachable!()
    }
    pub unsafe fn next(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_next(self.L, idx as c_int) != 0
    }
    pub unsafe fn concat(&mut self, n: i32) {
        #![inline]
        raw::lua_concat(self.L, n as c_int)
    }
    pub unsafe fn getallocf(&mut self, ud: *mut *mut libc::c_void) -> Alloc {
        #![inline]
        raw::lua_getallocf(self.L, ud)
    }
    pub unsafe fn setallocf(&mut self, f: Alloc, ud: *mut libc::c_void) {
        #![inline]
        raw::lua_setallocf(self.L, f, ud)
    }
    pub unsafe fn pop(&mut self, n: i32) {
        #![inline]
        raw::lua_pop(self.L, n as c_int)
    }
    pub unsafe fn newtable(&mut self) {
        #![inline]
        raw::lua_newtable(self.L)
    }
    pub unsafe fn register(&mut self, name: &str, f: CFunction) {
        #![inline]
        raw::lua_register(self.L, CString::new(name).unwrap().as_ptr(), f)
    }
    pub unsafe fn pushcfunction(&mut self, f: CFunction) {
        #![inline]
        raw::lua_pushcfunction(self.L, f)
    }
    pub unsafe fn isfunction(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_isfunction(self.L, idx as c_int)
    }
    pub unsafe fn istable(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_istable(self.L, idx as c_int)
    }
    pub unsafe fn islightuserdata(&mut self, idx: i32) -> bool {
        #![inline]
        // cast added for consistency with the other index-taking wrappers
        raw::lua_islightuserdata(self.L, idx as c_int)
    }
    pub unsafe fn isnil(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_isnil(self.L, idx as c_int)
    }
    pub unsafe fn isboolean(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_isboolean(self.L, idx as c_int)
    }
    pub unsafe fn isthread(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_isthread(self.L, idx as c_int)
    }
    pub unsafe fn isnone(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_isnone(self.L, idx as c_int)
    }
    pub unsafe fn isnoneornil(&mut self, idx: i32) -> bool {
        #![inline]
        raw::lua_isnoneornil(self.L, idx as c_int)
    }
    pub unsafe fn setglobal(&mut self, name: &str) {
        #![inline]
        raw::lua_setglobal(self.L, CString::new(name).unwrap().as_ptr())
    }
    pub unsafe fn getglobal(&mut self, name: &str) {
        #![inline]
        raw::lua_getglobal(self.L, CString::new(name).unwrap().as_ptr())
    }
}
/// Name under which the coroutine library is registered
pub const COLIBNAME: &'static str = lib::raw::LUA_COLIBNAME;
/// Name under which the table library is registered
pub const TABLIBNAME: &'static str = lib::raw::LUA_TABLIBNAME;
/// Name under which the io library is registered
pub const IOLIBNAME: &'static str = lib::raw::LUA_IOLIBNAME;
/// Name under which the os library is registered
pub const OSLIBNAME: &'static str = lib::raw::LUA_OSLIBNAME;
/// Name under which the string library is registered
pub const STRLIBNAME: &'static str = lib::raw::LUA_STRLIBNAME;
/// Name under which the math library is registered
pub const MATHLIBNAME: &'static str = lib::raw::LUA_MATHLIBNAME;
/// Name under which the debug library is registered
pub const DBLIBNAME: &'static str = lib::raw::LUA_DBLIBNAME;
/// Name under which the package library is registered
pub const LOADLIBNAME: &'static str = lib::raw::LUA_LOADLIBNAME;
// Functions from lualib
impl State {
    /// Opens the basic library.
    #[inline(always)]
    pub fn open_base(&mut self) {
        unsafe { self.as_extern().open_base() }
    }
    /// Opens the table library.
    #[inline(always)]
    pub fn open_table(&mut self) {
        unsafe { self.as_extern().open_table() }
    }
    /// Opens the io library.
    #[inline(always)]
    pub fn open_io(&mut self) {
        unsafe { self.as_extern().open_io() }
    }
    /// Opens the os library.
    #[inline(always)]
    pub fn open_os(&mut self) {
        unsafe { self.as_extern().open_os() }
    }
    /// Opens the string library.
    #[inline(always)]
    pub fn open_string(&mut self) {
        unsafe { self.as_extern().open_string() }
    }
    /// Opens the math library.
    #[inline(always)]
    pub fn open_math(&mut self) {
        unsafe { self.as_extern().open_math() }
    }
    /// Opens the debug library.
    #[inline(always)]
    pub fn open_debug(&mut self) {
        unsafe { self.as_extern().open_debug() }
    }
    /// Opens the package library.
    #[inline(always)]
    pub fn open_package(&mut self) {
        unsafe { self.as_extern().open_package() }
    }
    /// Opens all standard Lua libraries.
    #[inline(always)]
    pub fn openlibs(&mut self) {
        unsafe { self.as_extern().openlibs() }
    }
}
// Checked library loaders: each reserves two stack slots (for the library's
// luaopen_* function and its name argument) before delegating to the
// RawState implementation of the same name.
#[allow(missing_docs)]
impl<'l> ExternState<'l> {
    pub unsafe fn open_base(&mut self) {
        self.checkstack_(2);
        self.as_raw().open_base()
    }
    pub unsafe fn open_table(&mut self) {
        self.checkstack_(2);
        self.as_raw().open_table()
    }
    pub unsafe fn open_io(&mut self) {
        self.checkstack_(2);
        self.as_raw().open_io()
    }
    pub unsafe fn open_os(&mut self) {
        self.checkstack_(2);
        self.as_raw().open_os()
    }
    pub unsafe fn open_string(&mut self) {
        self.checkstack_(2);
        self.as_raw().open_string()
    }
    pub unsafe fn open_math(&mut self) {
        self.checkstack_(2);
        self.as_raw().open_math()
    }
    pub unsafe fn open_debug(&mut self) {
        self.checkstack_(2);
        self.as_raw().open_debug()
    }
    pub unsafe fn open_package(&mut self) {
        self.checkstack_(2);
        self.as_raw().open_package()
    }
    pub unsafe fn openlibs(&mut self) {
        self.checkstack_(2);
        self.as_raw().openlibs()
    }
}
// Raw library loaders. Each one pushes the library's luaopen_* C function,
// pushes the library name as its single argument, and calls it with no
// results — the same per-library sequence luaL_openlibs performs.
#[allow(missing_docs)]
impl<'l> RawState<'l> {
    pub unsafe fn open_base(&mut self) {
        #![inline]
        self.pushcfunction(lib::raw::luaopen_base);
        // NOTE(review): the base library is opened with "" rather than a
        // library name — this matches the lua_lib table used by
        // luaL_openlibs; confirm against the linked Lua version's linit.c.
        self.pushstring("");
        self.call(1, 0);
    }
    pub unsafe fn open_table(&mut self) {
        #![inline]
        self.pushcfunction(lib::raw::luaopen_table);
        self.pushstring(TABLIBNAME);
        self.call(1, 0);
    }
    pub unsafe fn open_io(&mut self) {
        #![inline]
        self.pushcfunction(lib::raw::luaopen_io);
        self.pushstring(IOLIBNAME);
        self.call(1, 0);
    }
    pub unsafe fn open_os(&mut self) {
        #![inline]
        self.pushcfunction(lib::raw::luaopen_os);
        self.pushstring(OSLIBNAME);
        self.call(1, 0);
    }
    pub unsafe fn open_string(&mut self) {
        #![inline]
        self.pushcfunction(lib::raw::luaopen_string);
        self.pushstring(STRLIBNAME);
        self.call(1, 0);
    }
    pub unsafe fn open_math(&mut self) {
        #![inline]
        self.pushcfunction(lib::raw::luaopen_math);
        self.pushstring(MATHLIBNAME);
        self.call(1, 0);
    }
    pub unsafe fn open_debug(&mut self) {
        #![inline]
        self.pushcfunction(lib::raw::luaopen_debug);
        self.pushstring(DBLIBNAME);
        self.call(1, 0);
    }
    pub unsafe fn open_package(&mut self) {
        #![inline]
        self.pushcfunction(lib::raw::luaopen_package);
        self.pushstring(LOADLIBNAME);
        self.call(1, 0);
    }
    pub unsafe fn openlibs(&mut self) {
        #![inline]
        lib::raw::luaL_openlibs(self.L)
    }
}
/// Sentinel reference guaranteed to be different from any reference returned
/// by luaL_ref (Lua's LUA_NOREF).
pub const NOREF: i32 = aux::raw::LUA_NOREF as i32;
/// Reference returned by luaL_ref when the value being referenced is nil
/// (Lua's LUA_REFNIL).
pub const REFNIL: i32 = aux::raw::LUA_REFNIL as i32;
// Functions from auxlib
impl State {
    // This impl is the safe/checked delegation layer: every method forwards to
    // the ExternState implementation, which performs the stack-space and
    // index-validity checks before touching the raw Lua API.
    /// Opens a library.
    ///
    /// When called with `libname` equal to None, it simply registers all
    /// functions in the list `l` into the table on the top of the stack.
    ///
    /// When called with a `libname` of Some(_), registerlib() creates a new
    /// table `t`, sets it as the value of the global variable `libname`, sets
    /// it as the value of `package.loaded[libname]`, and registers on it all
    /// functions in the list `l`. If there is a table in
    /// `package.loaded[libname]` or in variable `libname`, reuses this table
    /// instead of creating a new one.
    ///
    /// In any case the function leaves the table on the top of the stack.
    pub fn registerlib(&mut self, libname: Option<&str>, l: &[(&str,CFunction)]) {
        #![inline(always)]
        unsafe { self.as_extern().registerlib(libname, l) }
    }
    /// Pushes onto the stack the field `e` from the metatable of the object at
    /// index `obj`. If the object does not have a metatable, or if the
    /// metatable does not have this field, returns `false` and pushes nothing.
    pub fn getmetafield(&mut self, obj: i32, e: &str) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().getmetafield(obj, e) }
    }
    /// Calls a metamethod.
    ///
    /// If the object at index `obj` has a metatable and this metatable has a
    /// field `e`, this method calls this field and passes the object as its
    /// only argument. In this case this method returns `true` and pushes onto
    /// the stack the value returned by the call. If there is no metatable or
    /// no metamethod, this method returns `false` (without pushing any value
    /// on the stack).
    pub fn callmeta(&mut self, obj: i32, e: &str) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().callmeta(obj, e) }
    }
    /// Generates an error with a message like the following:
    ///
    ///   <location>: bad argument <narg> to '<func>' (<tname> expected, got <rt>)
    ///
    /// where `location` is produced by where(), `func` is the name of the
    /// current function, and `rt` is the type name of the actual argument.
    pub fn typerror(&mut self, narg: i32, tname: &str) -> ! {
        #![inline(always)]
        unsafe { self.as_extern().typerror(narg, tname) }
    }
    /// Raises an error with the following message, where `func` is taken from
    /// the call stack:
    ///
    ///   bad argument #<narg> to <func> (<extramsg>)
    pub fn argerror(&mut self, narg: i32, extramsg: &str) -> ! {
        #![inline(always)]
        unsafe { self.as_extern().argerror(narg, extramsg) }
    }
    /// Checks whether the function argument `narg` is a string, and returns
    /// the string. This function uses lua_tolstring to get its result, so all
    /// conversions and caveats of that function apply here.
    ///
    /// If the string is not utf-8, returns None.
    pub fn checkstring<'a>(&'a mut self, narg: i32) -> Option<&'a str> {
        #![inline(always)]
        // The transmute only narrows the 'static lifetime the extern layer
        // returns back down to 'a (the borrow of self).
        unsafe { mem::transmute(self.as_extern().checkstring(narg)) }
    }
    /// Checks whether the function argument `narg` is a lua string, and
    /// returns it as a byte vector. See checkstring() for caveats.
    pub fn checkbytes<'a>(&'a mut self, narg: i32) -> &'a [u8] {
        #![inline(always)]
        // Same lifetime-narrowing transmute as checkstring().
        unsafe { mem::transmute(self.as_extern().checkbytes(narg)) }
    }
    /// If the function argument `narg` is a string, returns this string. If
    /// this argument is absent or is nil, returns `d`. Otherwise, raises an
    /// error.
    ///
    /// If the argument is a string, but is not utf-8, returns None.
    pub fn optstring<'a>(&'a mut self, narg: i32, d: &'a str) -> Option<&'a str> {
        #![inline(always)]
        unsafe {
            // Widen the default to 'static for the extern layer, then narrow
            // the result back to 'a; the value never outlives 'a in practice.
            let d = mem::transmute::<&'a str, &'static str>(d);
            mem::transmute(self.as_extern().optstring(narg, d))
        }
    }
    /// If the function argument `narg` is a lua string, returns this string
    /// as a byte vector. See optstring() for more information.
    pub fn optbytes<'a>(&'a mut self, narg: i32, d: &'a [u8]) -> &'a [u8] {
        #![inline(always)]
        unsafe {
            // Same lifetime widening/narrowing dance as optstring().
            let d = mem::transmute::<&'a [u8], &'static [u8]>(d);
            mem::transmute(self.as_extern().optbytes(narg, d))
        }
    }
    /// Checks whether the function argument `narg` is a number and returns the
    /// number.
    pub fn checknumber(&mut self, narg: i32) -> f64 {
        #![inline(always)]
        unsafe { self.as_extern().checknumber(narg) }
    }
    /// If the function argument `narg` is a number, returns this number. If
    /// the argument is absent or is nil, returns `d`. Otherwise, throws an
    /// error.
    pub fn optnumber(&mut self, narg: i32, d: f64) -> f64 {
        #![inline(always)]
        unsafe { self.as_extern().optnumber(narg, d) }
    }
    /// Checks whether the function argument `narg` is a number and returns it
    /// as an isize.
    pub fn checkinteger(&mut self, narg: i32) -> isize {
        #![inline(always)]
        unsafe { self.as_extern().checkinteger(narg) }
    }
    /// If the function argument `narg` is a number, returns this number cast
    /// to an isize. If this argument is absent or nil, returns `d`. Otherwise,
    /// raises an error.
    pub fn optinteger(&mut self, narg: i32, d: isize) -> isize {
        #![inline(always)]
        unsafe { self.as_extern().optinteger(narg, d) }
    }
    /// Checks whether the function argument `narg` has type `t`.
    pub fn checktype(&mut self, narg: i32, t: Type) {
        #![inline(always)]
        unsafe { self.as_extern().checktype(narg, t) }
    }
    /// Checks whether the function has an argument of any type (including nil)
    /// at position `narg`.
    pub fn checkany(&mut self, narg: i32) {
        #![inline(always)]
        unsafe { self.as_extern().checkany(narg) }
    }
    /// If the registry already has the key `tname`, returns `false`.
    /// Otherwise, creates a new table to be used as a metatable for userdata,
    /// adds it to the registry with key `tname`, and returns `true`.
    ///
    /// In both cases pushes onto the stack the final value associated with
    /// `tname` in the registry.
    pub fn newmetatable(&mut self, tname: &str) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().newmetatable(tname) }
    }
    /// Checks whether the function argument `narg` is a userdata of the type
    /// `tname` (see newmetatable()). The userdata pointer is returned.
    pub fn checkudata(&mut self, narg: i32, tname: &str) -> *mut libc::c_void {
        #![inline(always)]
        unsafe { self.as_extern().checkudata(narg, tname) }
    }
    /// Pushes onto the stack a string identifying the current position of the
    /// control at level `lvl` in the call stack.
    /// Level 0 is the running function, level 1 is the function that called
    /// the running function, etc.
    pub fn where_(&mut self, lvl: i32) {
        #![inline(always)]
        unsafe { self.as_extern().where_(lvl) }
    }
    /// Raises an error with the given string.
    /// It also adds at the beginning of the message the file name and line
    /// number where the error occurred, if this information is available.
    pub fn errorstr(&mut self, s: &str) -> ! {
        #![inline(always)]
        unsafe { self.as_extern().errorstr(s) }
    }
    /// Checks whether the function arg `narg` is a string and searches for
    /// this string in `lst`. The first element of each tuple is compared
    /// against, and if a match is found, the second element is returned.
    /// Raises an error if the argument is not a string or the string cannot be
    /// found.
    ///
    /// If `def` is not None, the function uses `def` as a default value when
    /// there is no argument `narg` or this argument is nil.
    ///
    /// Fails the task if `def` or any list key has interior NULs
    pub fn checkoption<'a, T>(&mut self, narg: i32, def: Option<&str>, lst: &'a [(&str,T)])
                              -> &'a T {
        #![inline(always)]
        unsafe { self.as_extern().checkoption(narg, def, lst) }
    }
    /// Creates and returns a reference, in the table at index `t`, for the
    /// object at the top of the stack (and pops the object).
    ///
    /// A reference is a unique integer key. As long as you do not manually add
    /// integer keys into table `t`, ref_() ensures the uniqueness of the key
    /// it returns. You can retrieve an object referred by reference `r` by
    /// calling `L.rawget(t, r)`. Method unref() frees a reference and its
    /// associated object.
    ///
    /// If the object at the top of the stack is nil, ref_() returns the
    /// constant RefNil. The constant NoRef is guaranteed to be different from
    /// any reference returned by ref_().
    pub fn ref_(&mut self, t: i32) -> i32 {
        #![inline(always)]
        unsafe { self.as_extern().ref_(t) }
    }
    /// Releases reference `r` from the table at index `t` (see ref_()). The
    /// entry is removed from the table, so that the referred object can be
    /// collected. The reference `r` is also freed to be used again.
    ///
    /// If ref is NoRef or RefNil, unref() does nothing.
    pub fn unref(&mut self, t: i32, r: i32) {
        #![inline(always)]
        unsafe { self.as_extern().unref(t, r) }
    }
    /// Loads a file as a Lua chunk (but does not run it).
    /// If the `filename` is None, this loads from standard input.
    /// Fails the task if `filename` has any interior NULs.
    pub fn loadfile(&mut self, filename: Option<&path::Path>) -> Result<(),LoadFileError> {
        #![inline(always)]
        unsafe { self.as_extern().loadfile(filename) }
    }
    /// Loads a buffer as a Lua chunk (but does not run it).
    /// As far as Rust is concerned, this differ from loadstring() in that a
    /// name for the chunk is provided. It also allows for NUL bytes, but I
    /// expect Lua won't like those.
    /// Fails the task if `name` has any interior NULs.
    pub fn loadbuffer(&mut self, buf: &str, name: &str) -> Result<(),LoadError> {
        #![inline(always)]
        unsafe { self.as_extern().loadbuffer(buf, name) }
    }
    /// Loads a string as a Lua chunk (but does not run it).
    /// Fails the task if `s` has any interior NULs.
    pub fn loadstring(&mut self, s: &str) -> Result<(),LoadError> {
        #![inline(always)]
        unsafe { self.as_extern().loadstring(s) }
    }
    /// Creates a copy of string `s` by replacing any occurrence of the string
    /// `p` with the string `r`. Pushes the resulting string on the stack and
    /// returns it.
    pub fn gsub<'a>(&'a mut self, s: &str, p: &str, r: &str) -> &'a str {
        #![inline(always)]
        // Lifetime-narrowing transmute; the result lives on the Lua stack.
        unsafe { mem::transmute(self.as_extern().gsub(s, p, r)) }
    }
    /// Checks whether `cond` is true. If not, raises an error with the
    /// following message, where `func` is retrieved from the call stack:
    ///
    ///   bad argument #<narg> to <func> (<extramsg>)
    ///
    /// Fails the task if `extramsg` has interior NULs.
    pub fn argcheck(&mut self, cond: bool, narg: i32, extramsg: &str) {
        #![inline(always)]
        unsafe { self.as_extern().argcheck(cond, narg, extramsg) }
    }
    /// Loads and runs the given file. It returns `true` if there are no errors
    /// or `false` in case of errors.
    pub fn dofile(&mut self, filename: Option<&path::Path>) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().dofile(filename) }
    }
    /// Loads and runs the given string. It returns `true` if there are no
    /// errors or `false` in case of errors.
    pub fn dostring(&mut self, s: &str) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().dostring(s) }
    }
    /// Pushes onto the stack the metatable associated with the name `tname` in
    /// the registry (see newmetatable()).
    pub fn getmetatable_reg(&mut self, tname: &str) {
        #![inline(always)]
        unsafe { self.as_extern().getmetatable_reg(tname) }
    }
    /// Initializes and returns a Buffer
    pub fn buffinit<'a>(&'a mut self) -> Buffer<'a> {
        #![inline(always)]
        self.as_extern().buffinit()
    }
}
#[allow(missing_docs)]
impl<'l> ExternState<'l> {
    // Checked layer: each wrapper performs the stack reservation and/or index
    // validation that the corresponding raw auxlib call needs, then delegates
    // to RawState. The check must happen before the raw call -- do not reorder.
    pub unsafe fn registerlib(&mut self, libname: Option<&str>, l: &[(&str,CFunction)]) {
        // internally, luaL_registerlib seems to use 4 stack slots
        self.checkstack_(4);
        if libname.is_none() {
            // With no libname, the table to register into must already be on
            // the stack.
            luaassert!(self, self.gettop() >= 1, "registerlib: stack underflow");
        }
        self.as_raw().registerlib(libname, l)
    }
    pub unsafe fn getmetafield(&mut self, obj: i32, e: &str) -> bool {
        self.check_acceptable(obj);
        self.checkstack_(2); // internally, luaL_getmetafield uses 2 stack slots
        self.as_raw().getmetafield(obj, e)
    }
    pub unsafe fn callmeta(&mut self, obj: i32, e: &str) -> bool {
        self.check_acceptable(obj);
        self.checkstack_(2); // internally, luaL_callmeta uses 2 stack slots
        self.as_raw().callmeta(obj, e)
    }
    pub unsafe fn typerror(&mut self, narg: i32, tname: &str) -> ! {
        self.check_acceptable(narg);
        // NB: stack checking is not necessary
        self.as_raw().typerror(narg, tname)
    }
    pub unsafe fn argerror(&mut self, narg: i32, extramsg: &str) -> ! {
        // NB: stack checking is not necessary
        self.as_raw().argerror(narg, extramsg)
    }
    /// Note: the string is returned as 'static to prevent borrowing the
    /// RawState, but its lifetime is actually that of the value on the stack.
    pub unsafe fn checkstring(&mut self, narg: i32) -> Option<&'static str> {
        self.check_acceptable(narg);
        self.as_raw().checkstring(narg)
    }
    /// Note: the byte vector is returned as 'static to prevent borrowing the
    /// RawState, but its lifetime is actually that of the value on the stack.
    pub unsafe fn checkbytes(&mut self, narg: i32) -> &'static [u8] {
        self.check_acceptable(narg);
        self.as_raw().checkbytes(narg)
    }
    /// Note: the string is returned as 'static to prevent borrowing the
    /// RawState, but its lifetime is actually that of the value on the stack.
    pub unsafe fn optstring(&mut self, narg: i32, d: &'static str) -> Option<&'static str> {
        self.check_acceptable(narg);
        self.as_raw().optstring(narg, d)
    }
    /// Note: the byte vector is returned as 'static to prevent borrowing the
    /// RawState, but its lifetime is actually that of the value on the stack.
    pub unsafe fn optbytes(&mut self, narg: i32, d: &'static [u8]) -> &'static [u8] {
        self.check_acceptable(narg);
        self.as_raw().optbytes(narg, d)
    }
    pub unsafe fn checknumber(&mut self, narg: i32) -> f64 {
        self.check_acceptable(narg);
        self.as_raw().checknumber(narg)
    }
    pub unsafe fn optnumber(&mut self, narg: i32, d: f64) -> f64 {
        self.check_acceptable(narg);
        self.as_raw().optnumber(narg, d)
    }
    pub unsafe fn checkinteger(&mut self, narg: i32) -> isize {
        self.check_acceptable(narg);
        self.as_raw().checkinteger(narg)
    }
    pub unsafe fn optinteger(&mut self, narg: i32, d: isize) -> isize {
        self.check_acceptable(narg);
        self.as_raw().optinteger(narg, d)
    }
    pub unsafe fn checktype(&mut self, narg: i32, t: Type) {
        self.check_acceptable(narg);
        self.as_raw().checktype(narg, t)
    }
    pub unsafe fn checkany(&mut self, narg: i32) {
        self.check_acceptable(narg);
        self.as_raw().checkany(narg)
    }
    pub unsafe fn newmetatable(&mut self, tname: &str) -> bool {
        self.checkstack_(2); // uses 1 or 2 stack slots internally
        self.as_raw().newmetatable(tname)
    }
    pub unsafe fn checkudata(&mut self, narg: i32, tname: &str) -> *mut libc::c_void {
        self.check_acceptable(narg);
        self.checkstack_(2); // uses 2 stack slots internally
        self.as_raw().checkudata(narg, tname)
    }
    pub unsafe fn where_(&mut self, lvl: i32) {
        // luaL_where() internally uses lua_pushfstring(), which manages stack size itself
        // so we don't need to call checkstack()
        self.as_raw().where_(lvl)
    }
    pub unsafe fn errorstr(&mut self, s: &str) -> ! {
        self.checkstack_(2);
        self.as_raw().errorstr(s)
    }
    pub unsafe fn checkoption<'a, T>(&mut self, narg: i32, def: Option<&str>, lst: &'a [(&str,T)])
                                     -> &'a T {
        self.check_acceptable(narg);
        self.as_raw().checkoption(narg, def, lst)
    }
    pub unsafe fn ref_(&mut self, t: i32) -> i32 {
        self.check_valid(t, true);
        self.checkstack_(1); // luaL_ref internally uses 1 stack slot
        self.as_raw().ref_(t)
    }
    pub unsafe fn unref(&mut self, t: i32, r: i32) {
        self.check_acceptable(t);
        self.checkstack_(1); // luaL_unref internally uses 1 stack slot
        self.as_raw().unref(t, r)
    }
    pub unsafe fn loadfile(&mut self, filename: Option<&path::Path>) -> Result<(),LoadFileError> {
        self.checkstack_(1);
        self.as_raw().loadfile(filename)
    }
    pub unsafe fn loadbuffer(&mut self, buf: &str, name: &str) -> Result<(),LoadError> {
        self.checkstack_(1);
        self.as_raw().loadbuffer(buf, name)
    }
    pub unsafe fn loadstring(&mut self, s: &str) -> Result<(),LoadError> {
        self.checkstack_(1);
        self.as_raw().loadstring(s)
    }
    /// Note: the string is returned as 'static to prevent borrowing the
    /// RawState, but its lifetime is actually that of the value on the stack.
    pub unsafe fn gsub(&mut self, s: &str, p: &str, r: &str) -> &'static str {
        self.checkstack_(MINSTACK/2);
        self.as_raw().gsub(s, p, r)
    }
    pub unsafe fn argcheck(&mut self, cond: bool, narg: i32, extramsg: &str) {
        // NB: stack checking is not necessary
        self.as_raw().argcheck(cond, narg, extramsg)
    }
    pub unsafe fn dofile(&mut self, filename: Option<&path::Path>) -> bool {
        self.checkstack_(1);
        self.as_raw().dofile(filename)
    }
    pub unsafe fn dostring(&mut self, s: &str) -> bool {
        self.checkstack_(1);
        self.as_raw().dostring(s)
    }
    pub unsafe fn getmetatable_reg(&mut self, tname: &str) {
        self.as_raw().getmetatable_reg(tname)
    }
    pub fn buffinit<'a: 'l>(&'a mut self) -> Buffer<'a> {
        #![inline]
        // Storage for the auxlib buffer; luaL_buffinit below initializes it.
        let mut B = aux::raw::luaL_Buffer{
            p: ptr::null_mut(),
            lvl: 0,
            L: self.L,
            buffer: [0; aux::raw::LUAL_BUFFERSIZE as usize]
        };
        unsafe { aux::raw::luaL_buffinit(self.L, &mut B); }
        // The Buffer borrows self mutably for its whole lifetime; see the
        // Buffer struct documentation.
        Buffer{ B: B, L: self }
    }
}
#[allow(missing_docs)]
impl<'l> RawState<'l> {
    // Raw (unchecked) auxlib implementations. Stack space and index validity
    // are the caller's responsibility; ExternState provides checked wrappers.
    /// Registers the functions in `l`, optionally as library `libname`.
    /// Panics if `libname` or any function name has interior NULs.
    pub unsafe fn registerlib(&mut self, libname: Option<&str>, l: &[(&str,CFunction)]) {
        #![inline]
        let mut cstrs = Vec::with_capacity(l.len());
        let mut l_ = Vec::with_capacity(l.len()+1);
        for &(name, func) in l.iter() {
            let cstr = CString::new(name).unwrap();
            l_.push(aux::raw::luaL_Reg{ name: cstr.as_ptr(), func: Some(func) });
            // Keep each CString alive for the duration of the FFI call; the
            // luaL_Reg entries only borrow their buffers.
            cstrs.push(cstr);
        }
        // Sentinel entry terminating the luaL_Reg array.
        l_.push(aux::raw::luaL_Reg{ name: ptr::null(), func: None });
        let libcstr = libname.map(|s| CString::new(s).unwrap());
        // BUGFIX: borrow via as_ref(). The previous `libcstr.map_or(...)`
        // consumed (and dropped) the CString inside the closure, so the
        // pointer passed to luaL_register was dangling.
        let libname_ = libcstr.as_ref().map_or(ptr::null(), |cstr| cstr.as_ptr());
        aux::raw::luaL_register(self.L, libname_, l_.as_ptr())
    }
    /// Pushes metatable field `e` of the value at `obj`; false if absent.
    pub unsafe fn getmetafield(&mut self, obj: i32, e: &str) -> bool {
        #![inline]
        let cstr = CString::new(e).unwrap();
        aux::raw::luaL_getmetafield(self.L, obj as c_int, cstr.as_ptr()) != 0
    }
    /// Calls metamethod `e` of the value at `obj`; false if no such metamethod.
    pub unsafe fn callmeta(&mut self, obj: i32, e: &str) -> bool {
        #![inline]
        let cstr = CString::new(e).unwrap();
        aux::raw::luaL_callmeta(self.L, obj as c_int, cstr.as_ptr()) != 0
    }
    /// Raises a "bad argument type" error for argument `narg`.
    pub unsafe fn typerror(&mut self, narg: i32, tname: &str) -> ! {
        #![inline]
        let cstr = CString::new(tname).unwrap();
        aux::raw::luaL_typerror(self.L, narg as c_int, cstr.as_ptr());
        // luaL_typerror longjmps and never returns.
        unreachable!()
    }
    /// Raises a "bad argument" error for argument `narg`.
    pub unsafe fn argerror(&mut self, narg: i32, extramsg: &str) -> ! {
        #![inline]
        let cstr = CString::new(extramsg).unwrap();
        aux::raw::luaL_argerror(self.L, narg as c_int, cstr.as_ptr());
        // luaL_argerror longjmps and never returns.
        unreachable!()
    }
    /// Note: the string is returned as 'static to prevent borrowing the
    /// RawState, but its lifetime is actually that of the value on the stack.
    // TODO: change return type to use core::str::Utf8Error
    pub unsafe fn checkstring(&mut self, narg: i32) -> Option<&'static str> {
        #![inline]
        str::from_utf8(self.checkbytes(narg)).ok()
    }
    /// Note: the byte vector is returned as 'static to prevent borrowing the
    /// RawState, but its lifetime is actually that of the value on the stack.
    pub unsafe fn checkbytes(&mut self, narg: i32) -> &'static [u8] {
        #![inline]
        let mut sz: libc::size_t = 0;
        // Cast narg for consistency with the other luaL_* calls; c_int is not
        // guaranteed to be i32 on every target.
        let s = aux::raw::luaL_checklstring(self.L, narg as c_int, &mut sz);
        let buf = s as *const u8;
        slice::from_raw_parts(buf, sz as usize)
    }
    /// Note: the string is returned as 'static to prevent borrowing the
    /// RawState, but its lifetime is actually that of the value on the stack.
    // TODO: change return type to use core::str::Utf8Error
    pub unsafe fn optstring(&mut self, narg: i32, d: &'static str) -> Option<&'static str> {
        #![inline]
        str::from_utf8(self.optbytes(narg, d.as_bytes())).ok()
    }
    /// Note: the byte vector is returned as 'static to prevent borrowing the
    /// RawState, but its lifetime is actually that of the value on the stack.
    /// Panics if the default `d` has interior NULs.
    pub unsafe fn optbytes(&mut self, narg: i32, d: &'static [u8]) -> &'static [u8] {
        #![inline]
        let mut sz: libc::size_t = 0;
        // The default is passed as a C string, so it cannot contain NULs.
        let cstr = CString::new(d).unwrap();
        let s = aux::raw::luaL_optlstring(self.L, narg as c_int, cstr.as_ptr(), &mut sz);
        let buf = s as *const u8;
        slice::from_raw_parts(buf, sz as usize)
    }
    pub unsafe fn checknumber(&mut self, narg: i32) -> f64 {
        #![inline]
        aux::raw::luaL_checknumber(self.L, narg as c_int) as f64
    }
    pub unsafe fn optnumber(&mut self, narg: i32, d: f64) -> f64 {
        #![inline]
        aux::raw::luaL_optnumber(self.L, narg as c_int, d as raw::lua_Number) as f64
    }
    pub unsafe fn checkinteger(&mut self, narg: i32) -> isize {
        #![inline]
        aux::raw::luaL_checkinteger(self.L, narg as c_int) as isize
    }
    pub unsafe fn optinteger(&mut self, narg: i32, d: isize) -> isize {
        #![inline]
        aux::raw::luaL_optinteger(self.L, narg as c_int, d as raw::lua_Integer) as isize
    }
    pub unsafe fn checktype(&mut self, narg: i32, t: Type) {
        #![inline]
        aux::raw::luaL_checktype(self.L, narg as c_int, t as c_int)
    }
    pub unsafe fn checkany(&mut self, narg: i32) {
        #![inline]
        aux::raw::luaL_checkany(self.L, narg as c_int)
    }
    /// Registers (or fetches) the metatable `tname`; see State::newmetatable().
    pub unsafe fn newmetatable(&mut self, tname: &str) -> bool {
        #![inline]
        let cstr = CString::new(tname).unwrap();
        aux::raw::luaL_newmetatable(self.L, cstr.as_ptr()) != 0
    }
    /// Checks that argument `narg` is a userdata with metatable `tname`.
    pub unsafe fn checkudata(&mut self, narg: i32, tname: &str) -> *mut libc::c_void {
        #![inline]
        let cstr = CString::new(tname).unwrap();
        aux::raw::luaL_checkudata(self.L, narg as c_int, cstr.as_ptr())
    }
    /// Pushes a string describing the position at call-stack level `lvl`.
    pub unsafe fn where_(&mut self, lvl: i32) {
        #![inline]
        aux::raw::luaL_where(self.L, lvl as c_int)
    }
    /// Raises an error with message `s`, prefixed by the current position.
    pub unsafe fn errorstr(&mut self, s: &str) -> ! {
        #![inline]
        // Build "<where><s>" on the stack, then raise it.
        self.where_(1);
        self.pushstring(s);
        self.concat(2);
        raw::lua_error(self.L);
        // lua_error longjmps and never returns.
        unreachable!()
    }
    /// Looks up string argument `narg` (or `def`) among the keys of `lst` and
    /// returns the matching value; see State::checkoption().
    pub unsafe fn checkoption<'a, T>(&mut self, narg: i32, def: Option<&str>, lst: &'a [(&str,T)])
                                     -> &'a T {
        // def_cstr/lst_cstrs keep the C strings alive for the FFI call.
        let def_cstr = def.map(|d| CString::new(d).unwrap());
        let defp = def_cstr.as_ref().map_or(ptr::null(), |c| c.as_ptr());
        let mut lst_cstrs = Vec::with_capacity(lst.len());
        let mut lstv = Vec::with_capacity(lst.len()+1);
        for &(k,_) in lst.iter() {
            let cstr = CString::new(k).unwrap();
            lstv.push(cstr.as_ptr());
            lst_cstrs.push(cstr);
        }
        lstv.push(ptr::null());
        // luaL_checkoption returns the index of the match (or raises an error).
        let i = aux::raw::luaL_checkoption(self.L, narg as c_int, defp, lstv.as_ptr()) as usize;
        &lst[i].1
    }
    pub unsafe fn ref_(&mut self, t: i32) -> i32 {
        #![inline]
        aux::raw::luaL_ref(self.L, t as c_int) as i32
    }
    pub unsafe fn unref(&mut self, t: i32, r: i32) {
        #![inline]
        aux::raw::luaL_unref(self.L, t as c_int, r as c_int)
    }
    /// Loads a file (or stdin when `filename` is None) as a Lua chunk.
    pub unsafe fn loadfile(&mut self, filename: Option<&path::Path>) -> Result<(),LoadFileError> {
        #![inline]
        let cstr = filename.map(|p| CString::new(p.as_vec()).unwrap());
        // as_ref() keeps the CString alive across the FFI call.
        let ptr = cstr.as_ref().map_or(ptr::null(), |cstr| cstr.as_ptr());
        match aux::raw::luaL_loadfile(self.L, ptr) {
            0 => Ok(()),
            raw::LUA_ERRSYNTAX => Err(LoadFileError::ErrSyntax),
            raw::LUA_ERRMEM => Err(LoadFileError::ErrMem),
            aux::raw::LUA_ERRFILE => Err(LoadFileError::ErrFile),
            _ => self.errorstr("loadfile: unexpected error from luaL_loadfile")
        }
    }
    /// Loads `buf` as a Lua chunk named `name`; NUL bytes in `buf` are allowed.
    pub unsafe fn loadbuffer(&mut self, buf: &str, name: &str) -> Result<(),LoadError> {
        #![inline]
        let bp = buf.as_ptr() as *const libc::c_char;
        let bsz = buf.len() as libc::size_t;
        let cstr = CString::new(name).unwrap();
        match aux::raw::luaL_loadbuffer(self.L, bp, bsz, cstr.as_ptr()) {
            0 => Ok(()),
            raw::LUA_ERRSYNTAX => Err(LoadError::ErrSyntax),
            raw::LUA_ERRMEM => Err(LoadError::ErrMem),
            _ => self.errorstr("loadbuffer: unexpected error from luaL_loadbuffer")
        }
    }
    /// Loads `s` as a Lua chunk; panics on interior NULs.
    pub unsafe fn loadstring(&mut self, s: &str) -> Result<(),LoadError> {
        #![inline]
        let cstr = CString::new(s).unwrap();
        match aux::raw::luaL_loadstring(self.L, cstr.as_ptr()) {
            0 => Ok(()),
            raw::LUA_ERRSYNTAX => Err(LoadError::ErrSyntax),
            raw::LUA_ERRMEM => Err(LoadError::ErrMem),
            _ => self.errorstr("loadstring: unexpected error from luaL_loadstring")
        }
    }
    /// Note: the string is returned as 'static to prevent borrowing the
    /// RawState, but its lifetime is actually that of the value on the stack.
    pub unsafe fn gsub(&mut self, s: &str, p: &str, r: &str) -> &'static str {
        #![inline]
        let (s_, p_, r_) = (CString::new(s).unwrap(),
                            CString::new(p).unwrap(),
                            CString::new(r).unwrap());
        let (sp, pp, rp) = (s_.as_ptr(), p_.as_ptr(), r_.as_ptr());
        let res = aux::raw::luaL_gsub(self.L, sp, pp, rp);
        let cstr = CStr::from_ptr(res);
        // luaL_gsub output comes from our NUL-free utf-8 inputs, so re-decoding
        // it is expected to succeed.
        let res = str::from_utf8(cstr.to_bytes()).unwrap();
        // Lifetime laundering: the string actually lives on the Lua stack.
        mem::transmute::<&str,&'static str>(res)
    }
    pub unsafe fn argcheck(&mut self, cond: bool, narg: i32, extramsg: &str) {
        #![inline]
        let cstr = CString::new(extramsg).unwrap();
        aux::raw::luaL_argcheck(self.L, cond, narg as c_int, cstr.as_ptr())
    }
    /// Loads and runs the file (or stdin); true on success.
    pub unsafe fn dofile(&mut self, filename: Option<&path::Path>) -> bool {
        #![inline]
        let cstr = filename.map(|p| CString::new(p.as_vec()).unwrap());
        // BUGFIX: borrow via as_ref(). The previous consuming `cstr.map_or(...)`
        // dropped the CString inside the closure and handed luaL_dofile a
        // dangling pointer.
        let name = cstr.as_ref().map_or(ptr::null(), |c| c.as_ptr());
        aux::raw::luaL_dofile(self.L, name) == 0
    }
    /// Loads and runs the string; true on success.
    pub unsafe fn dostring(&mut self, s: &str) -> bool {
        #![inline]
        // Bind the CString to a local so its lifetime plainly covers the call
        // (avoids the temporary_cstring_as_ptr footgun).
        let cstr = CString::new(s).unwrap();
        aux::raw::luaL_dostring(self.L, cstr.as_ptr()) == 0
    }
    /// Pushes registry\[tname\] (the metatable created by newmetatable()).
    pub unsafe fn getmetatable_reg(&mut self, tname: &str) {
        #![inline]
        self.getfield(REGISTRYINDEX, tname)
    }
}
/// String buffer for building Lua strings piecemeal.
///
/// The Buffer assumes it needs longjmp safety, like ExternState.
pub struct Buffer<'a> {
    // The underlying auxlib buffer (luaL_Buffer) that this wrapper drives.
    B: aux::raw::luaL_Buffer,
    /// A &mut pointer to the ExternState that created this Buffer.
    /// The buffer internally holds on to the *lua_Buffer that the State wraps,
    /// so to ensure safety it also borrows the &mut ExternState. Use this
    /// field to get mutable access to the State while the buffer is alive.
    pub L: &'a mut ExternState<'a>
}
/// Size of the internal buffer used by Buffer and returned by prepbuffer()
pub const BUFFERSIZE: usize = aux::raw::LUAL_BUFFERSIZE as usize;
impl<'a> Buffer<'a> {
    /// Adds the byte `c` to the buffer.
    pub unsafe fn addbyte(&mut self, c: u8) {
        #![inline]
        // don't call through to luaL_addchar, because we want to insert a call to checkstack()
        // iff we have to prep the buffer.
        let startp: *mut libc::c_char = &mut self.B.buffer[0];
        // If the write cursor has run past the end of the inline buffer,
        // flush/re-prep via luaL_prepbuffer before storing the byte.
        if self.B.p >= startp.offset(aux::raw::LUAL_BUFFERSIZE as isize) {
            self.L.checkstack_(1);
            aux::raw::luaL_prepbuffer(&mut self.B);
        }
        // Store the byte and advance the cursor, mirroring luaL_addchar.
        *self.B.p = c as libc::c_char;
        self.B.p = self.B.p.offset(1);
    }
    /// Adds the char `c` as utf-8 bytes to the buffer.
    pub unsafe fn addchar(&mut self, c: char) {
        #![inline]
        // 4 bytes is the maximum utf-8 encoding length.
        let mut buf = [0u8; 4];
        // NOTE(review): encode_utf8 returning an unwrappable byte count is a
        // pre-stabilization API -- confirm against the pinned toolchain.
        let count = c.encode_utf8(&mut buf).unwrap();
        self.addbytes(&buf[..count]);
    }
    /// Adds to the buffer a string of length `n` previously copied to the
    /// buffer area (see prepbuffer()).
    pub unsafe fn addsize(&mut self, n: usize) {
        #![inline]
        aux::raw::luaL_addsize(&mut self.B, n as libc::size_t)
    }
    /// Returns a pointer to an array of size BUFFERSIZE where you can copy a
    /// string to be added to the buffer. After copying the string into this
    /// space you must call addsize() with the size of the string to actually
    /// add it to the buffer.
    pub unsafe fn prepbuffer(&mut self) -> &mut [u8; aux::raw::LUAL_BUFFERSIZE as usize] {
        #![inline]
        self.L.checkstack_(1);
        // luaL_prepbuffer ends up returning the buffer field.
        // Rather than unsafely trying to transmute that to the array, just return the field
        // ourselves.
        aux::raw::luaL_prepbuffer(&mut self.B);
        // Reinterpret the c_char array as u8; same size and layout.
        mem::transmute::<&mut [i8; aux::raw::LUAL_BUFFERSIZE as usize],
                         &mut [u8; aux::raw::LUAL_BUFFERSIZE as usize]>(&mut self.B.buffer)
    }
    /// Adds the string to the buffer.
    pub unsafe fn addstring(&mut self, s: &str) {
        #![inline]
        self.addbytes(s.as_bytes())
    }
    /// Adds the byte vector to the buffer.
    pub unsafe fn addbytes(&mut self, bytes: &[u8]) {
        #![inline]
        // luaL_addlstring() just iterates over the string calling addchar().
        // We want our checkstack calls, so let's just do that here instead directly.
        for &b in bytes.iter() {
            self.addbyte(b);
        }
    }
    /// Adds the value at the top of the stack to the buffer. Pops the value.
    ///
    /// This is the only method on string buffers that can (and must) be called
    /// with an extra element on the stack, which is the value to be added to
    /// the buffer.
    pub unsafe fn addvalue(&mut self) {
        #![inline]
        luaassert!(self.L, self.L.gettop() >= 1, "addvalue: stack underflow");
        self.L.checkstack_(1); // luaL_addvalue() needs this if the value is too large
        aux::raw::luaL_addvalue(&mut self.B)
    }
    /// Finishes the use of the buffer, leaving the final string on top of the
    /// stack.
    ///
    /// Consumes the Buffer, releasing the borrow of the ExternState.
    pub unsafe fn pushresult(mut self) {
        #![inline]
        self.L.checkstack_(1); // possibly needed for the emptybuffer
        aux::raw::luaL_pushresult(&mut self.B)
    }
}
/* Debug API */
/// Event codes
#[derive(Copy)]
pub enum DebugEvent {
/// The call hook is called when the interpreter calls a function. The hook is called
/// just after Lua enters the new function, before the function gets its arguments.
HookCall = raw::LUA_HOOKCALL as isize,
/// The return hook is called when the interpreter returns from a function. The hook is
/// called just before Lua leaves the function. You have no access to the values to be
/// returned by the function.
HookRet = raw::LUA_HOOKRET as isize,
/// The line hook is called when the interpreter is about to start the execution of a new
/// line of code, or when it jumps back in the code (even to the same line).
/// (This event only happens while Lua is executing a Lua function.)
HookLine = raw::LUA_HOOKLINE as isize,
/// The count hook is called after the interpreter executes every `count` instructions.
/// (This event only happens while Lua is executing a Lua function.)
HookCount = raw::LUA_HOOKCOUNT as isize,
/// The tailret event is used when a HookRet hook is called while simulating a return from
/// a function that did a tail call; in this case, it is useless to call getinfo().
HookTailRet = raw::LUA_HOOKTAILRET as isize
}
impl DebugEvent {
    /// Converts a c_int event code to a DebugEvent.
    ///
    /// Returns None for event codes that do not correspond to any hook event.
    pub fn from_event(event: c_int) -> Option<DebugEvent> {
        if event == raw::LUA_HOOKCALL {
            Some(DebugEvent::HookCall)
        } else if event == raw::LUA_HOOKRET {
            Some(DebugEvent::HookRet)
        } else if event == raw::LUA_HOOKLINE {
            Some(DebugEvent::HookLine)
        } else if event == raw::LUA_HOOKCOUNT {
            Some(DebugEvent::HookCount)
        } else if event == raw::LUA_HOOKTAILRET {
            Some(DebugEvent::HookTailRet)
        } else {
            None
        }
    }
}
/// Event mask for HookCall
pub const MASKCALL: i32 = raw::LUA_MASKCALL as i32;
/// Event mask for HookRet
pub const MASKRET: i32 = raw::LUA_MASKRET as i32;
/// Event mask for HookLine
pub const MASKLINE: i32 = raw::LUA_MASKLINE as i32;
/// Event mask for HookCount
pub const MASKCOUNT: i32 = raw::LUA_MASKCOUNT as i32;
/// Type for functions to be called by the debugger in specific events
pub type Hook = raw::lua_Hook;
/// A structure used to carry different pieces of information about an active function.
/// getstack() fills only the private part of this structure, for later use. To fill the other
/// fields of lua_Debug with useful information, call getinfo().
pub type Debug = raw::lua_Debug;
impl Debug {
    /// Returns a newly-zeroed instance of Debug
    pub fn new() -> Debug {
        #![inline]
        // Default is in the prelude; lua_Debug's Default impl zeroes all fields.
        Default::default()
    }
}
impl State {
    /// Gets information about the interpreter runtime stack.
    ///
    /// This function returns a Debug structure with an identification of the
    /// activation record of the function executing at a given level. Level 0
    /// is the current running function, whereas level n+1 is the function that
    /// has called level n. When there are no errors, getstack() returns
    /// Some(Debug); when called with a level greater than the stack depth, it
    /// returns None.
    pub fn getstack(&mut self, level: i32) -> Option<Debug> {
        #![inline(always)]
        self.as_extern().getstack(level)
    }
    /// Returns information about a specific function or function invocation.
    ///
    /// To get information about a function invocation, the parameter `ar` must
    /// be a valid activation record that was returned by a previous call to
    /// getstack() or given as argument to a hook.
    ///
    /// To get information about a function you push it onto the stack and
    /// start the `what` string with the character '>'. (In that case,
    /// getinfo() pops the function in the top of the stack.) For instance, to
    /// know in which line a function `f` was defined, you can write the
    /// following code:
    ///
    ///     let ar = Debug::new();
    ///     L.getfield(GLOBALSINDEX, "f"); // get global 'f'
    ///     L.getinfo(">S", &mut ar);
    ///     println!("{}", ar.linedefined);
    ///
    /// Each character in the string `what` selects some fields of the
    /// structure `ar` to be filled or a value to be pushed on the stack:
    ///
    /// * 'n': fills in the fields `name` and `namewhat`
    /// * 'S': fills in the fields `source`, `short_src`, `linedefined`,
    ///        `lastlinedefined`, and `what`
    /// * 'l': fills in the field `currentline`
    /// * 'u': fills in the field `nups`
    /// * 'f': pushes onto the stack the function that is running at the given
    ///        level
    /// * 'L': pushes onto the stack a table whose indices are the numbers of
    ///        the lines that are valid on the function. (A valid line is a
    ///        line with some associated code, that is, a line where you can
    ///        put a break point. Non-valid lines include empty lines and
    ///        comments.)
    ///
    /// This function returns `false` on error (for instance, an invalid option
    /// in `what`).
    ///
    /// Fails the task if `what` has interior NULs.
    pub fn getinfo(&mut self, what: &str, ar: &mut Debug) -> bool {
        #![inline(always)]
        unsafe { self.as_extern().getinfo(what, ar) }
    }
    /// Gets information about a local variable of a given activation record.
    /// The parameter `ar` must be a valid activation record that was filled by
    /// a previous call to getstack() or given as an argument to a hook. The
    /// index `n` selects which local variable to inspect (1 is the first
    /// parameter or active local variable, and so on, until the last active
    /// local variable). getlocal() pushes the variable's value onto the stack
    /// and returns its name.
    ///
    /// Variable names starting with '(' represent internal variables (loop
    /// control variables, temporaries, and C function locals).
    ///
    /// The name is returned as a &[u8] to avoid confusion with failed utf-8
    /// decoding vs invalid indices.
    pub fn getlocal<'a>(&mut self, ar: &'a Debug, n: i32) -> Option<&'a [u8]> {
        #![inline(always)]
        unsafe { self.as_extern().getlocal(ar, n) }
    }
    /// Sets the value of a local variable of a given activation record.
    /// Parameters `ar` and `n` are as in getlocal(). setlocal() assigns the
    /// value at the top of the stack to the variable and returns its name. It
    /// also pops the value from the stack.
    ///
    /// Returns None (and pops nothing) when the index is greater than the
    /// number of active local variables.
    ///
    /// The name is returned as a &[u8] to avoid confusion with failed utf-8
    /// decoding vs invalid indices.
    pub fn setlocal<'a>(&mut self, ar: &'a mut Debug, n: i32) -> Option<&'a [u8]> {
        #![inline(always)]
        unsafe { self.as_extern().setlocal(ar, n) }
    }
    /// Gets information about a closure's upvalue. (For Lua functions,
    /// upvalues are the external local variables that the function uses, and
    /// that are consequently included in its closure.) getupvalue() gets the
    /// index `n` of an upvalue, pushes the upvalue's value onto the stack, and
    /// returns its name. `funcindex` points to the closure in the stack.
    /// (Upvalues have no particular order, as they are active through the
    /// whole function. So, they are numbered in an arbitrary order.)
    ///
    /// Returns None (and pushes nothing) when the index is greater than the
    /// number of upvalues. For C functions, this function uses the empty
    /// string "" as a name for all upvalues.
    ///
    /// The name is returned as a &[u8] to avoid confusion with failed utf-8
    /// decoding vs invalid indices.
    pub fn getupvalue<'a>(&'a mut self, funcidx: i32, n: i32) -> Option<&'a [u8]> {
        #![inline(always)]
        unsafe { self.as_extern().getupvalue(funcidx, n) }
    }
    /// Sets the value of a closure's upvalue. It assigns the value at the top
    /// of the stack to the upvalue and returns its name. It also pops the
    /// value from the stack. Parameters `funcindex` and `n` are as in
    /// getupvalue().
    ///
    /// Returns None (and pops nothing) when the index is greater than the
    /// number of upvalues.
    ///
    /// The name is returned as a &[u8] to avoid confusion with failed utf-8
    /// decoding vs invalid indices.
    pub fn setupvalue<'a>(&'a mut self, funcidx: i32, n: i32) -> Option<&'a [u8]> {
        #![inline(always)]
        unsafe { self.as_extern().setupvalue(funcidx, n) }
    }
    /// Sets the debugging hook function.
    ///
    /// Argument `f` is the hook function. `mask` specifies on which events the
    /// hook will be called: it is formed by a bitwise OR of the Mask*
    /// constants in DebugEvent. The `count` argument is only meaningful when
    /// the mask includes DebugEvent::MaskCount.
    ///
    /// A hook is disabled by setting `mask` to zero.
    pub fn sethook(&mut self, f: Hook, mask: i32, count: i32) {
        #![inline(always)]
        self.as_extern().sethook(f, mask, count)
    }
    /// Returns the current hook function
    pub fn gethook(&mut self) -> Hook {
        #![inline(always)]
        self.as_extern().gethook()
    }
    /// Returns the current hook mask
    pub fn gethookmask(&mut self) -> i32 {
        #![inline(always)]
        self.as_extern().gethookmask()
    }
    /// Returns the current hook count
    pub fn gethookcount(&mut self) -> i32 {
        #![inline(always)]
        self.as_extern().gethookcount()
    }
}
#[allow(missing_docs)]
// Checked layer: each method validates stack preconditions (and reserves
// stack space where the underlying C call pushes a value) before delegating
// to the corresponding `RawState` method.
impl<'l> ExternState<'l> {
    pub fn getstack(&mut self, level: i32) -> Option<Debug> {
        self.as_raw().getstack(level)
    }
    pub unsafe fn getinfo(&mut self, what: &str, ar: &mut Debug) -> bool {
        // A leading '>' makes lua_getinfo pop a function from the stack, so
        // ensure one is actually there.
        if what.starts_with(">") {
            luaassert!(self, self.gettop() >= 1 && self.isfunction(-1),
                       "getinfo: top stack value is not a function");
        }
        // The 'f' and 'L' options each push one value onto the stack.
        if what.find(['f', 'L'].as_slice()).is_some() {
            self.checkstack_(1);
        }
        self.as_raw().getinfo(what, ar)
    }
    pub unsafe fn getlocal<'a>(&mut self, ar: &'a Debug, n: i32) -> Option<&'a [u8]> {
        // getlocal pushes the variable's value onto the stack.
        self.checkstack_(1);
        self.as_raw().getlocal(ar, n)
    }
    pub unsafe fn setlocal<'a>(&mut self, ar: &'a mut Debug, n: i32) -> Option<&'a [u8]> {
        // setlocal pops the value it assigns, so one must be present.
        luaassert!(self, self.gettop() >= 1, "setlocal: stack underflow");
        self.as_raw().setlocal(ar, n)
    }
    pub unsafe fn getupvalue<'a>(&'a mut self, funcidx: i32, n: i32) -> Option<&'a [u8]> {
        self.check_acceptable(funcidx);
        // getupvalue pushes the upvalue's value onto the stack.
        self.checkstack_(1);
        self.as_raw().getupvalue(funcidx, n)
    }
    pub unsafe fn setupvalue<'a>(&'a mut self, funcidx: i32, n: i32) -> Option<&'a [u8]> {
        self.check_acceptable(funcidx);
        // NOTE(review): setupvalue pops rather than pushes, so this
        // checkstack_ (mirroring getupvalue) may be unnecessary, and there is
        // no gettop() >= 1 assertion as in setlocal -- confirm intended.
        self.checkstack_(1);
        self.as_raw().setupvalue(funcidx, n)
    }
    pub fn sethook(&mut self, f: Hook, mask: i32, count: i32) {
        self.as_raw().sethook(f, mask, count)
    }
    pub fn gethook(&mut self) -> Hook {
        self.as_raw().gethook()
    }
    pub fn gethookmask(&mut self) -> i32 {
        self.as_raw().gethookmask()
    }
    pub fn gethookcount(&mut self) -> i32 {
        self.as_raw().gethookcount()
    }
}
#[allow(missing_docs)]
// Raw layer: direct, unchecked wrappers around the Lua C debug API. Callers
// are expected to have validated the stack state (see `ExternState`).
impl<'l> RawState<'l> {
    pub fn getstack(&mut self, level: i32) -> Option<Debug> {
        #![inline]
        let mut ar: Debug = std::default::Default::default();
        // lua_getstack returns 0 when `level` exceeds the stack depth.
        if unsafe { raw::lua_getstack(self.L, level as c_int, &mut ar) != 0 } {
            Some(ar)
        } else {
            None
        }
    }
    pub unsafe fn getinfo(&mut self, what: &str, ar: &mut Debug) -> bool {
        #![inline]
        // CString::new fails the task on interior NULs; the temporary lives
        // until the end of the statement, so the pointer stays valid for the
        // duration of the call.
        raw::lua_getinfo(self.L, CString::new(what).unwrap().as_ptr(), ar) != 0
    }
    pub unsafe fn getlocal<'a>(&mut self, ar: &'a Debug, n: i32) -> Option<&'a [u8]> {
        #![inline]
        // NULL result means the index was out of range.
        let res = raw::lua_getlocal(self.L, ar, n as c_int);
        c_str_to_bytes(res)
    }
    pub unsafe fn setlocal<'a>(&mut self, ar: &'a mut Debug, n: i32) -> Option<&'a [u8]> {
        #![inline]
        let res = raw::lua_setlocal(self.L, ar, n as c_int);
        c_str_to_bytes(res)
    }
    pub unsafe fn getupvalue<'a>(&'a mut self, funcidx: i32, n: i32) -> Option<&'a [u8]> {
        #![inline]
        let res = raw::lua_getupvalue(self.L, funcidx as c_int, n as c_int);
        c_str_to_bytes(res)
    }
    pub unsafe fn setupvalue<'a>(&'a mut self, funcidx: i32, n: i32) -> Option<&'a [u8]> {
        #![inline]
        let res = raw::lua_setupvalue(self.L, funcidx as c_int, n as c_int);
        c_str_to_bytes(res)
    }
    pub fn sethook(&mut self, f: Hook, mask: i32, count: i32) {
        #![inline]
        unsafe { raw::lua_sethook(self.L, f, mask as c_int, count as c_int); }
    }
    pub fn gethook(&mut self) -> Hook {
        #![inline]
        unsafe { raw::lua_gethook(self.L) }
    }
    pub fn gethookmask(&mut self) -> i32 {
        #![inline]
        unsafe { raw::lua_gethookmask(self.L) as i32 }
    }
    pub fn gethookcount(&mut self) -> i32 {
        #![inline]
        unsafe { raw::lua_gethookcount(self.L) as i32 }
    }
}
/// Converts a possibly-NULL C string pointer into a borrowed byte slice
/// (no allocation); NULL maps to `None`.
///
/// # Safety
///
/// `cstr` must be null or point to a NUL-terminated string that remains
/// valid for the lifetime `'a`.
unsafe fn c_str_to_bytes<'a>(cstr: *const libc::c_char) -> Option<&'a [u8]> {
    #![inline]
    if !cstr.is_null() {
        Some(CStr::from_ptr(cstr).to_bytes())
    } else {
        None
    }
}
|
//! A library providing a generic connection pool.
#![feature(unsafe_destructor, phase)]
#![warn(missing_doc)]
#![doc(html_root_url="http://www.rust-ci.org/sfackler/r2d2/doc")]
#[phase(plugin, link)]
extern crate log;
extern crate serialize;
use std::comm;
use std::collections::{Deque, RingBuf};
use std::sync::{Arc, Mutex};
use std::fmt;
pub use config::Config;
mod config;
/// A trait which provides database-specific functionality.
///
/// `C` is the connection type managed by the pool and `E` is the error type
/// reported by the backing driver.
pub trait PoolManager<C, E>: Send+Sync {
    /// Attempts to create a new connection.
    fn connect(&self) -> Result<C, E>;
    /// Determines if the connection is still connected to the database.
    ///
    /// A standard implementation would check if a simple query like `SELECT 1`
    /// succeeds.
    fn is_valid(&self, conn: &mut C) -> Result<(), E>;
    /// *Quickly* determines if the connection is no longer usable.
    ///
    /// This will be called synchronously every time a connection is returned
    /// to the pool, so it should *not* block. If it returns `true`, the
    /// connection will be discarded.
    ///
    /// For example, an implementation might check if the underlying TCP socket
    /// has disconnected. Implementations that do not support this kind of
    /// fast health check may simply return `false`.
    fn has_broken(&self, conn: &mut C) -> bool;
}
/// A trait which handles errors reported by the `PoolManager`.
///
/// Implementations are shared across tasks (hence the `Send+Sync` bound).
pub trait ErrorHandler<E>: Send+Sync {
    /// Handles an error.
    fn handle_error(&self, error: E);
}
// Allow a boxed trait object to be used wherever a concrete handler is
// expected, by delegating to the boxed value.
impl<E> ErrorHandler<E> for Box<ErrorHandler<E>+Sync+Send> {
    fn handle_error(&self, error: E) {
        (**self).handle_error(error)
    }
}
/// An `ErrorHandler` which does nothing.
pub struct NoopErrorHandler;

// Silently discards every error.
impl<E> ErrorHandler<E> for NoopErrorHandler {
    fn handle_error(&self, _: E) {}
}
/// An `ErrorHandler` which logs at the error level.
pub struct LoggingErrorHandler;
impl<E> ErrorHandler<E> for LoggingErrorHandler where E: fmt::Show {
fn handle_error(&self, error: E) {
error!("Error opening connection: {}", error);
}
}
/// An error type returned if pool creation fails.
#[deriving(PartialEq, Eq)]
pub enum NewPoolError<E> {
    /// The provided pool configuration was invalid.
    InvalidConfig(&'static str),
    /// The manager returned an error when creating a connection.
    ///
    /// Carries the error produced by `PoolManager::connect`.
    ConnectionError(E),
}
// Human-readable formatting for pool-creation failures; requires the wrapped
// error to be formattable itself.
impl<E> fmt::Show for NewPoolError<E> where E: fmt::Show {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            InvalidConfig(ref error) => write!(f, "Invalid config: {}", error),
            ConnectionError(ref error) => write!(f, "Unable to create connections: {}", error),
        }
    }
}
// Work items sent to the pool's helper tasks.
enum Command<C> {
    // Open a new connection and add it to the pool.
    AddConnection,
    // Re-validate this connection before returning it to the pool.
    TestConnection(C),
}
// Mutable pool state, guarded by the mutex in `InnerPool`.
struct PoolInternals<C> {
    // Idle connections available for checkout.
    conns: RingBuf<C>,
    // Number of live connections, both idle and checked out.
    num_conns: uint,
}
// Shared core of the pool; reached through an `Arc` by `Pool` handles and
// the helper tasks.
struct InnerPool<C, E, M, H>
    where C: Send, E: Send, M: PoolManager<C, E>, H: ErrorHandler<E> {
    config: Config,
    manager: M,
    error_handler: H,
    internals: Mutex<PoolInternals<C>>,
}
/// A generic connection pool.
pub struct Pool<C, E, M, H>
    where C: Send, E: Send, M: PoolManager<C, E>, H: ErrorHandler<E> {
    // Used to submit `Command`s to the helper tasks.
    helper_chan: Mutex<Sender<Command<C>>>,
    inner: Arc<InnerPool<C, E, M, H>>
}
impl<C, E, M, H> Pool<C, E, M, H>
    where C: Send, E: Send, M: PoolManager<C, E>, H: ErrorHandler<E> {
    /// Creates a new connection pool.
    ///
    /// Validates `config`, eagerly opens `config.pool_size` connections, and
    /// spawns `config.helper_tasks` background tasks to service `Command`s.
    /// Returns `InvalidConfig` if the configuration is rejected, or
    /// `ConnectionError` if any initial connection cannot be opened.
    pub fn new(config: Config, manager: M, error_handler: H)
               -> Result<Pool<C, E, M, H>, NewPoolError<E>> {
        try!(config.validate().map_err(InvalidConfig));
        let mut internals = PoolInternals {
            conns: RingBuf::new(),
            // All pool_size connections are created just below, so the count
            // can be set up front.
            num_conns: config.pool_size,
        };
        for _ in range(0, config.pool_size) {
            match manager.connect() {
                Ok(conn) => internals.conns.push(conn),
                Err(err) => return Err(ConnectionError(err)),
            }
        }
        let inner = Arc::new(InnerPool {
            config: config,
            manager: manager,
            error_handler: error_handler,
            internals: Mutex::new(internals),
        });
        let (sender, receiver) = comm::channel();
        // FIXME :( -- a Receiver cannot be shared directly, so the helper
        // tasks take turns on it through a Mutex.
        let receiver = Arc::new(Mutex::new(receiver));
        for _ in range(0, config.helper_tasks) {
            let inner = inner.clone();
            let receiver = receiver.clone();
            spawn(proc() helper_task(receiver, inner));
        }
        Ok(Pool {
            helper_chan: Mutex::new(sender),
            inner: inner,
        })
    }
    /// Retrieves a connection from the pool.
    ///
    /// Blocks on the mutex's condition variable until an idle connection
    /// becomes available.
    pub fn get<'a>(&'a self) -> Result<PooledConnection<'a, C, E, M, H>, ()> {
        let mut internals = self.inner.internals.lock();
        loop {
            match internals.conns.pop_front() {
                Some(mut conn) => {
                    // Release the lock while running the (possibly slow)
                    // validity check.
                    drop(internals);
                    if self.inner.config.test_on_check_out {
                        match self.inner.manager.is_valid(&mut conn) {
                            Ok(()) => {}
                            Err(e) => {
                                self.inner.error_handler.handle_error(e);
                                internals = self.inner.internals.lock();
                                // NOTE(review): the dead connection is not
                                // replaced (no AddConnection command is sent),
                                // so the pool can shrink permanently here --
                                // confirm this is intended.
                                internals.num_conns -= 1;
                                continue;
                            }
                        }
                    }
                    return Ok(PooledConnection {
                        pool: self,
                        conn: Some(conn),
                    })
                }
                None => internals.cond.wait(),
            }
        }
    }
    // Returns a connection to the pool, discarding it if the manager reports
    // it broken. Called from PooledConnection's destructor.
    fn put_back(&self, mut conn: C) {
        // This is specified to be fast, but call it before locking anyways
        let broken = self.inner.manager.has_broken(&mut conn);
        let mut internals = self.inner.internals.lock();
        if broken {
            internals.num_conns -= 1;
        } else {
            internals.conns.push(conn);
            // Wake one task blocked in get().
            internals.cond.signal();
        }
    }
}
// Body run by each helper task: takes one Command at a time off the shared
// receiver and services it until the channel closes.
fn helper_task<C, E, M, H>(receiver: Arc<Mutex<Receiver<Command<C>>>>,
                           inner: Arc<InnerPool<C, E, M, H>>)
    where C: Send, E: Send, M: PoolManager<C, E>, H: ErrorHandler<E> {
    loop {
        let mut receiver = receiver.lock();
        let res = receiver.recv_opt();
        // Release the receiver lock before doing the (possibly slow) work so
        // the other helpers can pick up commands.
        drop(receiver);
        match res {
            Ok(AddConnection) => add_connection(&*inner),
            Ok(TestConnection(conn)) => test_connection(&*inner, conn),
            // The channel has closed (all senders dropped); shut down.
            Err(()) => break,
        }
    }
}
// Opens one new connection and adds it to the pool, reporting any failure
// to the error handler.
fn add_connection<C, E, M, H>(inner: &InnerPool<C, E, M, H>)
    where C: Send, E: Send, M: PoolManager<C, E>, H: ErrorHandler<E> {
    match inner.manager.connect() {
        Ok(conn) => {
            let mut internals = inner.internals.lock();
            internals.conns.push(conn);
            internals.num_conns += 1;
            // Wake one task blocked in Pool::get.
            internals.cond.signal();
        }
        Err(err) => inner.error_handler.handle_error(err),
    }
}
// Runs the manager's validity check on a connection, returning it to the
// pool on success and discarding it (with an error report) on failure.
fn test_connection<C, E, M, H>(inner: &InnerPool<C, E, M, H>, mut conn: C)
    where C: Send, E: Send, M: PoolManager<C, E>, H: ErrorHandler<E> {
    // Run the check before taking the lock; is_valid may be slow.
    let is_valid = inner.manager.is_valid(&mut conn);
    let mut internals = inner.internals.lock();
    match is_valid {
        // NOTE(review): unlike add_connection/put_back there is no
        // cond.signal() here, so waiters in Pool::get are not woken when the
        // connection is returned -- confirm intended.
        Ok(()) => internals.conns.push(conn),
        Err(e) => {
            inner.error_handler.handle_error(e);
            internals.num_conns -= 1;
        }
    }
}
/// A smart pointer wrapping an underlying connection.
///
/// The connection is returned to the pool when this value is dropped.
pub struct PooledConnection<'a, C, E, M, H>
    where C: Send, E: Send, M: PoolManager<C, E>, H: ErrorHandler<E> {
    pool: &'a Pool<C, E, M, H>,
    // Always `Some` until the destructor takes the connection back out.
    conn: Option<C>,
}
#[unsafe_destructor]
impl<'a, C, E, M, H> Drop for PooledConnection<'a, C, E, M, H>
    where C: Send, E: Send, M: PoolManager<C, E>, H: ErrorHandler<E> {
    fn drop(&mut self) {
        // `conn` is always `Some` at this point; `take` moves it out so it
        // can be handed back to the pool by value.
        self.pool.put_back(self.conn.take().unwrap());
    }
}
// Lets a pooled connection be used anywhere a `&C` is expected.
impl<'a, C, E, M, H> Deref<C> for PooledConnection<'a, C, E, M, H>
    where C: Send, E: Send, M: PoolManager<C, E>, H: ErrorHandler<E> {
    fn deref(&self) -> &C {
        self.conn.as_ref().unwrap()
    }
}
Fix log message: LoggingErrorHandler receives all pool errors (connection-validity failures included), not just failures to open a connection, so use a generic "Connection error" message.
//! A library providing a generic connection pool.
#![feature(unsafe_destructor, phase)]
#![warn(missing_doc)]
#![doc(html_root_url="http://www.rust-ci.org/sfackler/r2d2/doc")]
#[phase(plugin, link)]
extern crate log;
extern crate serialize;
use std::comm;
use std::collections::{Deque, RingBuf};
use std::sync::{Arc, Mutex};
use std::fmt;
pub use config::Config;
mod config;
/// A trait which provides database-specific functionality.
///
/// `C` is the connection type managed by the pool and `E` is the error type
/// reported by the backing driver.
pub trait PoolManager<C, E>: Send+Sync {
    /// Attempts to create a new connection.
    fn connect(&self) -> Result<C, E>;
    /// Determines if the connection is still connected to the database.
    ///
    /// A standard implementation would check if a simple query like `SELECT 1`
    /// succeeds.
    fn is_valid(&self, conn: &mut C) -> Result<(), E>;
    /// *Quickly* determines if the connection is no longer usable.
    ///
    /// This will be called synchronously every time a connection is returned
    /// to the pool, so it should *not* block. If it returns `true`, the
    /// connection will be discarded.
    ///
    /// For example, an implementation might check if the underlying TCP socket
    /// has disconnected. Implementations that do not support this kind of
    /// fast health check may simply return `false`.
    fn has_broken(&self, conn: &mut C) -> bool;
}
/// A trait which handles errors reported by the `PoolManager`.
///
/// Implementations are shared across tasks (hence the `Send+Sync` bound).
pub trait ErrorHandler<E>: Send+Sync {
    /// Handles an error.
    fn handle_error(&self, error: E);
}
// Allow a boxed trait object to be used wherever a concrete handler is
// expected, by delegating to the boxed value.
impl<E> ErrorHandler<E> for Box<ErrorHandler<E>+Sync+Send> {
    fn handle_error(&self, error: E) {
        (**self).handle_error(error)
    }
}
/// An `ErrorHandler` which does nothing.
pub struct NoopErrorHandler;

// Silently discards every error.
impl<E> ErrorHandler<E> for NoopErrorHandler {
    fn handle_error(&self, _: E) {}
}
/// An `ErrorHandler` which logs at the error level.
pub struct LoggingErrorHandler;

impl<E> ErrorHandler<E> for LoggingErrorHandler where E: fmt::Show {
    fn handle_error(&self, error: E) {
        // Generic message: this handler receives every pool error
        // (is_valid() failures included), not just connect() failures.
        error!("Connection error: {}", error);
    }
}
/// An error type returned if pool creation fails.
#[deriving(PartialEq, Eq)]
pub enum NewPoolError<E> {
    /// The provided pool configuration was invalid.
    InvalidConfig(&'static str),
    /// The manager returned an error when creating a connection.
    ///
    /// Carries the error produced by `PoolManager::connect`.
    ConnectionError(E),
}
// Human-readable formatting for pool-creation failures; requires the wrapped
// error to be formattable itself.
impl<E> fmt::Show for NewPoolError<E> where E: fmt::Show {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            InvalidConfig(ref error) => write!(f, "Invalid config: {}", error),
            ConnectionError(ref error) => write!(f, "Unable to create connections: {}", error),
        }
    }
}
// Work items sent to the pool's helper tasks.
enum Command<C> {
    // Open a new connection and add it to the pool.
    AddConnection,
    // Re-validate this connection before returning it to the pool.
    TestConnection(C),
}
// Mutable pool state, guarded by the mutex in `InnerPool`.
struct PoolInternals<C> {
    // Idle connections available for checkout.
    conns: RingBuf<C>,
    // Number of live connections, both idle and checked out.
    num_conns: uint,
}
// Shared core of the pool; reached through an `Arc` by `Pool` handles and
// the helper tasks.
struct InnerPool<C, E, M, H>
    where C: Send, E: Send, M: PoolManager<C, E>, H: ErrorHandler<E> {
    config: Config,
    manager: M,
    error_handler: H,
    internals: Mutex<PoolInternals<C>>,
}
/// A generic connection pool.
pub struct Pool<C, E, M, H>
    where C: Send, E: Send, M: PoolManager<C, E>, H: ErrorHandler<E> {
    // Used to submit `Command`s to the helper tasks.
    helper_chan: Mutex<Sender<Command<C>>>,
    inner: Arc<InnerPool<C, E, M, H>>
}
impl<C, E, M, H> Pool<C, E, M, H>
    where C: Send, E: Send, M: PoolManager<C, E>, H: ErrorHandler<E> {
    /// Creates a new connection pool.
    ///
    /// Validates `config`, eagerly opens `config.pool_size` connections, and
    /// spawns `config.helper_tasks` background tasks to service `Command`s.
    /// Returns `InvalidConfig` if the configuration is rejected, or
    /// `ConnectionError` if any initial connection cannot be opened.
    pub fn new(config: Config, manager: M, error_handler: H)
               -> Result<Pool<C, E, M, H>, NewPoolError<E>> {
        try!(config.validate().map_err(InvalidConfig));
        let mut internals = PoolInternals {
            conns: RingBuf::new(),
            // All pool_size connections are created just below, so the count
            // can be set up front.
            num_conns: config.pool_size,
        };
        for _ in range(0, config.pool_size) {
            match manager.connect() {
                Ok(conn) => internals.conns.push(conn),
                Err(err) => return Err(ConnectionError(err)),
            }
        }
        let inner = Arc::new(InnerPool {
            config: config,
            manager: manager,
            error_handler: error_handler,
            internals: Mutex::new(internals),
        });
        let (sender, receiver) = comm::channel();
        // FIXME :( -- a Receiver cannot be shared directly, so the helper
        // tasks take turns on it through a Mutex.
        let receiver = Arc::new(Mutex::new(receiver));
        for _ in range(0, config.helper_tasks) {
            let inner = inner.clone();
            let receiver = receiver.clone();
            spawn(proc() helper_task(receiver, inner));
        }
        Ok(Pool {
            helper_chan: Mutex::new(sender),
            inner: inner,
        })
    }
    /// Retrieves a connection from the pool.
    ///
    /// Blocks on the mutex's condition variable until an idle connection
    /// becomes available.
    pub fn get<'a>(&'a self) -> Result<PooledConnection<'a, C, E, M, H>, ()> {
        let mut internals = self.inner.internals.lock();
        loop {
            match internals.conns.pop_front() {
                Some(mut conn) => {
                    // Release the lock while running the (possibly slow)
                    // validity check.
                    drop(internals);
                    if self.inner.config.test_on_check_out {
                        match self.inner.manager.is_valid(&mut conn) {
                            Ok(()) => {}
                            Err(e) => {
                                self.inner.error_handler.handle_error(e);
                                internals = self.inner.internals.lock();
                                // NOTE(review): the dead connection is not
                                // replaced (no AddConnection command is sent),
                                // so the pool can shrink permanently here --
                                // confirm this is intended.
                                internals.num_conns -= 1;
                                continue;
                            }
                        }
                    }
                    return Ok(PooledConnection {
                        pool: self,
                        conn: Some(conn),
                    })
                }
                None => internals.cond.wait(),
            }
        }
    }
    // Returns a connection to the pool, discarding it if the manager reports
    // it broken. Called from PooledConnection's destructor.
    fn put_back(&self, mut conn: C) {
        // This is specified to be fast, but call it before locking anyways
        let broken = self.inner.manager.has_broken(&mut conn);
        let mut internals = self.inner.internals.lock();
        if broken {
            internals.num_conns -= 1;
        } else {
            internals.conns.push(conn);
            // Wake one task blocked in get().
            internals.cond.signal();
        }
    }
}
// Body run by each helper task: takes one Command at a time off the shared
// receiver and services it until the channel closes.
fn helper_task<C, E, M, H>(receiver: Arc<Mutex<Receiver<Command<C>>>>,
                           inner: Arc<InnerPool<C, E, M, H>>)
    where C: Send, E: Send, M: PoolManager<C, E>, H: ErrorHandler<E> {
    loop {
        let mut receiver = receiver.lock();
        let res = receiver.recv_opt();
        // Release the receiver lock before doing the (possibly slow) work so
        // the other helpers can pick up commands.
        drop(receiver);
        match res {
            Ok(AddConnection) => add_connection(&*inner),
            Ok(TestConnection(conn)) => test_connection(&*inner, conn),
            // The channel has closed (all senders dropped); shut down.
            Err(()) => break,
        }
    }
}
// Opens one new connection and adds it to the pool, reporting any failure
// to the error handler.
fn add_connection<C, E, M, H>(inner: &InnerPool<C, E, M, H>)
    where C: Send, E: Send, M: PoolManager<C, E>, H: ErrorHandler<E> {
    match inner.manager.connect() {
        Ok(conn) => {
            let mut internals = inner.internals.lock();
            internals.conns.push(conn);
            internals.num_conns += 1;
            // Wake one task blocked in Pool::get.
            internals.cond.signal();
        }
        Err(err) => inner.error_handler.handle_error(err),
    }
}
// Runs the manager's validity check on a connection, returning it to the
// pool on success and discarding it (with an error report) on failure.
fn test_connection<C, E, M, H>(inner: &InnerPool<C, E, M, H>, mut conn: C)
    where C: Send, E: Send, M: PoolManager<C, E>, H: ErrorHandler<E> {
    // Run the check before taking the lock; is_valid may be slow.
    let is_valid = inner.manager.is_valid(&mut conn);
    let mut internals = inner.internals.lock();
    match is_valid {
        // NOTE(review): unlike add_connection/put_back there is no
        // cond.signal() here, so waiters in Pool::get are not woken when the
        // connection is returned -- confirm intended.
        Ok(()) => internals.conns.push(conn),
        Err(e) => {
            inner.error_handler.handle_error(e);
            internals.num_conns -= 1;
        }
    }
}
/// A smart pointer wrapping an underlying connection.
///
/// The connection is returned to the pool when this value is dropped.
pub struct PooledConnection<'a, C, E, M, H>
    where C: Send, E: Send, M: PoolManager<C, E>, H: ErrorHandler<E> {
    pool: &'a Pool<C, E, M, H>,
    // Always `Some` until the destructor takes the connection back out.
    conn: Option<C>,
}
#[unsafe_destructor]
impl<'a, C, E, M, H> Drop for PooledConnection<'a, C, E, M, H>
    where C: Send, E: Send, M: PoolManager<C, E>, H: ErrorHandler<E> {
    fn drop(&mut self) {
        // `conn` is always `Some` at this point; `take` moves it out so it
        // can be handed back to the pool by value.
        self.pool.put_back(self.conn.take().unwrap());
    }
}
// Lets a pooled connection be used anywhere a `&C` is expected.
impl<'a, C, E, M, H> Deref<C> for PooledConnection<'a, C, E, M, H>
    where C: Send, E: Send, M: PoolManager<C, E>, H: ErrorHandler<E> {
    fn deref(&self) -> &C {
        self.conn.as_ref().unwrap()
    }
}
|
extern crate num;

// Each functional area lives in its own module; everything is re-exported
// here so callers get a single flat namespace.
pub mod create;
pub use create::*;
pub mod set;
pub use set::*;
pub mod mul;
pub use mul::*;
pub mod sdiv;
pub use sdiv::*;
pub mod misc;
pub use misc::*;
pub mod transform;
pub use transform::*;
Add `no_std` support: enable the `no_std` feature gate and build the crate without the standard library.
// Build without the standard library (requires the no_std feature gate).
#![feature(no_std)]
#![no_std]

extern crate num;

// Each functional area lives in its own module; everything is re-exported
// here so callers get a single flat namespace.
pub mod create;
pub use create::*;
pub mod set;
pub use set::*;
pub mod mul;
pub use mul::*;
pub mod sdiv;
pub use sdiv::*;
pub mod misc;
pub use misc::*;
pub mod transform;
pub use transform::*;
|
// Copyright 2017 Kyle Mayes
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Lock-free queues.
#![warn(missing_copy_implementations, missing_debug_implementations, missing_docs)]
#![cfg_attr(feature="clippy", feature(plugin))]
#![cfg_attr(feature="clippy", plugin(clippy))]
#![cfg_attr(feature="clippy", warn(clippy))]
Add ConsumeError enum
// Copyright 2017 Kyle Mayes
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Lock-free queues.
#![warn(missing_copy_implementations, missing_debug_implementations, missing_docs)]
#![cfg_attr(feature="clippy", feature(plugin))]
#![cfg_attr(feature="clippy", plugin(clippy))]
#![cfg_attr(feature="clippy", warn(clippy))]
use std::error;
use std::fmt;
//================================================
// Enums
//================================================
// ConsumeError __________________________________
/// Indicates the reason a `consume` operation could not return an item.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ConsumeError {
    /// The queue was empty and had no remaining producers.
    ///
    /// No further items can ever arrive, so retrying cannot succeed.
    Disconnected,
    /// The queue was empty.
    Empty,
}
impl error::Error for ConsumeError {
    /// Returns a static description of this error.
    fn description(&self) -> &str {
        if let ConsumeError::Empty = *self {
            "the queue was empty"
        } else {
            "the queue was empty and had no remaining producers"
        }
    }
}
impl fmt::Display for ConsumeError {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{}", error::Error::description(self))
}
}
|
#![crate_name = "stal"]
#![crate_type = "lib"]
// NOTE(review): unused placeholder type -- appears to be leftover dummy test
// code; consider removing.
pub struct Hello;
/// A set expression over Redis keys: either a literal key or a composition
/// of nested expressions.
pub enum Set {
    /// An existing Redis set, referenced by key.
    Key(Vec<u8>),
    /// The union of the nested expressions.
    Union(Vec<Set>),
    /// The intersection of the nested expressions.
    Inter(Vec<Set>),
    /// The difference of the nested expressions.
    Diff(Vec<Set>),
}
use Set::*;
impl Set {
    // The Redis *STORE command for a composite node. `Key` is unreachable
    // because convert() returns early for keys before calling this.
    fn command(&self) -> &'static str {
        match *self {
            Key(_) => unreachable!(),
            Union(_) => "SUNIONSTORE",
            Inter(_) => "SINTERSTORE",
            Diff(_) => "SDIFFSTORE",
        }
    }
    /// Lowers this expression into Redis commands.
    ///
    /// * `ids` accumulates the generated temporary key names ("stal:N").
    /// * `ops` accumulates commands; child operations are pushed before
    ///   their parent's, so executing `ops` in order satisfies dependencies.
    ///
    /// Returns the key under which this expression's result can be read.
    pub fn convert(&self, ids: &mut Vec<String>, ops: &mut Vec<Vec<Vec<u8>>>) -> Vec<u8> {
        let sets = match *self {
            // A literal key needs no computation.
            Key(ref k) => return k.clone(),
            Union(ref sets) => sets,
            Inter(ref sets) => sets,
            Diff(ref sets) => sets,
        };
        let mut op = Vec::with_capacity(2 + sets.len());
        let id = format!("stal:{}", ids.len());
        let r = id.as_bytes().to_vec();
        ids.push(id);
        op.push(self.command().as_bytes().to_vec());
        op.push(r.clone());
        // Recurses while building the argument list; the children's own
        // operations are appended to `ops` during this call.
        op.extend(sets.into_iter().map(|s| s.convert(ids, ops)));
        ops.push(op);
        r
    }
}
/// A Redis command applied to the result of evaluating a `Set` expression.
pub struct Stal {
    // Command to run against the materialized set's key.
    operation: String,
    // The set expression to materialize.
    set: Set,
}
impl Stal {
    /// Creates a query applying the Redis command `operation` to the key
    /// produced by materializing `set`.
    pub fn new(operation: String, set: Set) -> Self {
        Stal {
            operation: operation,
            set: set,
        }
    }
    /// Returns the list of Redis commands that would be executed, without
    /// any transaction wrapper or cleanup of temporary keys.
    pub fn explain(&self) -> Vec<Vec<Vec<u8>>> {
        let mut ids = vec![];
        let mut ops = vec![];
        let key = self.set.convert(&mut ids, &mut ops);
        ops.push(vec![self.operation.as_bytes().to_vec(), key]);
        ops
    }
    /// Returns the full command list, wrapped in MULTI/EXEC and followed by
    /// a DEL of any temporary keys, together with the index of the command
    /// whose reply holds the result.
    pub fn solve(&self) -> (Vec<Vec<Vec<u8>>>, usize) {
        let mut ids = vec![];
        let mut ops = vec![vec![b"MULTI".to_vec()]];
        let key = self.set.convert(&mut ids, &mut ops);
        ops.push(vec![self.operation.as_bytes().to_vec(), key]);
        // Index of the operation's reply within the queued commands.
        let pos = ops.len() - 1;
        // Clean up the temporary "stal:N" keys, if any were created.
        // (idiom: is_empty() instead of len() > 0; into_bytes() consumes the
        // String directly instead of copying via as_bytes().to_vec())
        if !ids.is_empty() {
            let mut del = vec![b"DEL".to_vec()];
            del.extend(ids.into_iter().map(|x| x.into_bytes()));
            ops.push(del);
        }
        ops.push(vec![b"EXEC".to_vec()]);
        (ops, pos)
    }
}
Remove dummy test code
#![crate_name = "stal"]
#![crate_type = "lib"]
/// A set expression over Redis keys: either a literal key or a composition
/// of nested expressions.
pub enum Set {
    /// An existing Redis set, referenced by key.
    Key(Vec<u8>),
    /// The union of the nested expressions.
    Union(Vec<Set>),
    /// The intersection of the nested expressions.
    Inter(Vec<Set>),
    /// The difference of the nested expressions.
    Diff(Vec<Set>),
}
use Set::*;
impl Set {
    // The Redis *STORE command for a composite node. `Key` is unreachable
    // because convert() returns early for keys before calling this.
    fn command(&self) -> &'static str {
        match *self {
            Key(_) => unreachable!(),
            Union(_) => "SUNIONSTORE",
            Inter(_) => "SINTERSTORE",
            Diff(_) => "SDIFFSTORE",
        }
    }
    /// Lowers this expression into Redis commands.
    ///
    /// * `ids` accumulates the generated temporary key names ("stal:N").
    /// * `ops` accumulates commands; child operations are pushed before
    ///   their parent's, so executing `ops` in order satisfies dependencies.
    ///
    /// Returns the key under which this expression's result can be read.
    pub fn convert(&self, ids: &mut Vec<String>, ops: &mut Vec<Vec<Vec<u8>>>) -> Vec<u8> {
        let sets = match *self {
            // A literal key needs no computation.
            Key(ref k) => return k.clone(),
            Union(ref sets) => sets,
            Inter(ref sets) => sets,
            Diff(ref sets) => sets,
        };
        let mut op = Vec::with_capacity(2 + sets.len());
        let id = format!("stal:{}", ids.len());
        let r = id.as_bytes().to_vec();
        ids.push(id);
        op.push(self.command().as_bytes().to_vec());
        op.push(r.clone());
        // Recurses while building the argument list; the children's own
        // operations are appended to `ops` during this call.
        op.extend(sets.into_iter().map(|s| s.convert(ids, ops)));
        ops.push(op);
        r
    }
}
/// A Redis command applied to the result of evaluating a `Set` expression.
pub struct Stal {
    // Command to run against the materialized set's key.
    operation: String,
    // The set expression to materialize.
    set: Set,
}
impl Stal {
    /// Creates a query applying the Redis command `operation` to the key
    /// produced by materializing `set`.
    pub fn new(operation: String, set: Set) -> Self {
        Stal {
            operation: operation,
            set: set,
        }
    }
    /// Returns the list of Redis commands that would be executed, without
    /// any transaction wrapper or cleanup of temporary keys.
    pub fn explain(&self) -> Vec<Vec<Vec<u8>>> {
        let mut ids = vec![];
        let mut ops = vec![];
        let key = self.set.convert(&mut ids, &mut ops);
        ops.push(vec![self.operation.as_bytes().to_vec(), key]);
        ops
    }
    /// Returns the full command list, wrapped in MULTI/EXEC and followed by
    /// a DEL of any temporary keys, together with the index of the command
    /// whose reply holds the result.
    pub fn solve(&self) -> (Vec<Vec<Vec<u8>>>, usize) {
        let mut ids = vec![];
        let mut ops = vec![vec![b"MULTI".to_vec()]];
        let key = self.set.convert(&mut ids, &mut ops);
        ops.push(vec![self.operation.as_bytes().to_vec(), key]);
        // Index of the operation's reply within the queued commands.
        let pos = ops.len() - 1;
        // Clean up the temporary "stal:N" keys, if any were created.
        // (idiom: is_empty() instead of len() > 0; into_bytes() consumes the
        // String directly instead of copying via as_bytes().to_vec())
        if !ids.is_empty() {
            let mut del = vec![b"DEL".to_vec()];
            del.extend(ids.into_iter().map(|x| x.into_bytes()));
            ops.push(del);
        }
        ops.push(vec![b"EXEC".to_vec()]);
        (ops, pos)
    }
}
|
//! Parse and write Iridium Short Burst Data (SBD) messages.
//!
//! # Background
//!
//! Iridium is both a
//! [satellite constellation](https://en.wikipedia.org/wiki/Iridium_satellite_constellation)
//! and a [company](https://en.wikipedia.org/wiki/Iridium_Communications) that provides satellite
//! communications. The Iridium network is used by phones, modems, and other communication devices.
//!
//! One mode of transmitting data over the Iridium network is via Short Burst Data (SBD) messages.
//! These messages carry a payload of some small number of bytes, usually less than one thousand.
//! Messages can be Mobile-Originated (MO), meaning that they are sent *from* an Iridium modem, or
//! Mobile-Terminated (MT), meaning that the are sent *to* an Iridium modem. Mobile-Originated
//! messages are delivered either to an email address via MIME attachment, or directly to a given
//! IP address and port via TCP; this second method is called DirectIP.
//!
//! # Usage
//!
//! This is a simple library for reading mobile originated SBD messages from a stream, decoding
//! their headers and data payloads, and writing them back to a stream. This library does not
//! handle mobile terminated messages.
//!
//! MO messages can be read from a byte stream:
//!
//! ```
//! let mut file = std::fs::File::open("data/0-mo.sbd").unwrap();
//! let message = sbd::mo::Message::read_from(file).unwrap();
//! ```
//!
//! To receive MO messages via DirectIP, a server is provided.
//! This server will listen for incoming messages forever, storing them in a `Storage`:
//!
//! ```no_run
//! let storage = sbd::storage::FilesystemStorage::open("/var/iridium").unwrap();
//! let mut server = sbd::directip::Server::new("0.0.0.0:10800", storage);
//! server.serve_forever();
//! ```
//!
//! Most of the functionality of this library is exposed by a single executable, named `sbd`. Use
//! the `sbd` executable to inspect raw sbd files stores on a filesystem, interrogate sbd files on a
//! filesystem, and start that forever-running server to receive Iridium SBD DirectIP messages.
#![deny(missing_copy_implementations, missing_debug_implementations, missing_docs, trivial_casts,
trivial_numeric_casts, unsafe_code, unstable_features, unused_extern_crates,
unused_import_braces, unused_qualifications)]
pub mod directip;
pub mod error;
mod information_element;
pub mod mo;
pub mod storage;
pub use error::Error;
extern crate byteorder;
extern crate chrono;
#[macro_use]
extern crate log;
extern crate rustc_serialize;
extern crate walkdir;
/// Crate-specific `Result`, fixing the error type to this crate's `Error`.
pub type Result<T> = std::result::Result<T, Error>;
Reorganize lib imports: move the `extern crate` declarations ahead of the module declarations and re-exports in lib.rs.
//! Parse and write Iridium Short Burst Data (SBD) messages.
//!
//! # Background
//!
//! Iridium is both a
//! [satellite constellation](https://en.wikipedia.org/wiki/Iridium_satellite_constellation)
//! and a [company](https://en.wikipedia.org/wiki/Iridium_Communications) that provides satellite
//! communications. The Iridium network is used by phones, modems, and other communication devices.
//!
//! One mode of transmitting data over the Iridium network is via Short Burst Data (SBD) messages.
//! These messages carry a payload of some small number of bytes, usually less than one thousand.
//! Messages can be Mobile-Originated (MO), meaning that they are sent *from* an Iridium modem, or
//! Mobile-Terminated (MT), meaning that the are sent *to* an Iridium modem. Mobile-Originated
//! messages are delivered either to an email address via MIME attachment, or directly to a given
//! IP address and port via TCP; this second method is called DirectIP.
//!
//! # Usage
//!
//! This is a simple library for reading mobile originated SBD messages from a stream, decoding
//! their headers and data payloads, and writing them back to a stream. This library does not
//! handle mobile terminated messages.
//!
//! MO messages can be read from a byte stream:
//!
//! ```
//! let mut file = std::fs::File::open("data/0-mo.sbd").unwrap();
//! let message = sbd::mo::Message::read_from(file).unwrap();
//! ```
//!
//! To receive MO messages via DirectIP, a server is provided.
//! This server will listen for incoming messages forever, storing them in a `Storage`:
//!
//! ```no_run
//! let storage = sbd::storage::FilesystemStorage::open("/var/iridium").unwrap();
//! let mut server = sbd::directip::Server::new("0.0.0.0:10800", storage);
//! server.serve_forever();
//! ```
//!
//! Most of the functionality of this library is exposed by a single executable, named `sbd`. Use
//! the `sbd` executable to inspect raw sbd files stores on a filesystem, interrogate sbd files on a
//! filesystem, and start that forever-running server to receive Iridium SBD DirectIP messages.
#![deny(missing_copy_implementations, missing_debug_implementations, missing_docs, trivial_casts,
trivial_numeric_casts, unsafe_code, unstable_features, unused_extern_crates,
unused_import_braces, unused_qualifications)]
extern crate byteorder;
extern crate chrono;
#[macro_use]
extern crate log;
extern crate rustc_serialize;
extern crate walkdir;
pub mod directip;
pub mod error;
mod information_element;
pub mod mo;
pub mod storage;
pub use error::Error;
/// Crate-specific `Result`: `Ok(T)` or this crate's [`Error`].
pub type Result<T> = std::result::Result<T, Error>;
|
#![feature(ptr_internals)]
extern crate clingo_sys;
extern crate libc;
use std::mem;
use std::ptr::Unique;
use std::ffi::CStr;
use std::ffi::CString;
use libc::c_int;
use libc::c_char;
use clingo_sys::*;
pub use clingo_sys::{clingo_ast_statement_t, clingo_ast_term_type_t, clingo_logger_t};
pub use clingo_sys::clingo_show_type_bitset_t;
pub use clingo_sys::clingo_solve_mode_bitset_t;
/// Error codes of the clingo library, mirroring the C `clingo_error_t` enum.
/// The discriminants are taken verbatim from the generated bindings.
#[derive(Debug, Copy, Clone)]
pub enum Error {
    Success = clingo_error_clingo_error_success as isize,
    Runtime = clingo_error_clingo_error_runtime as isize,
    Logic = clingo_error_clingo_error_logic as isize,
    BadAlloc = clingo_error_clingo_error_bad_alloc as isize,
    Unknown = clingo_error_clingo_error_unknown as isize,
}
/// Search modes, mirroring the C `clingo_solve_mode_t` bit values.
#[derive(Debug, Copy, Clone)]
pub enum SolveMode {
    Async = clingo_solve_mode_clingo_solve_mode_async as isize,
    Yield = clingo_solve_mode_clingo_solve_mode_yield as isize,
}
/// Selection of atoms to be shown in models, mirroring `clingo_show_type_t`.
#[derive(Debug, Copy, Clone)]
pub enum ShowType {
    CSP = clingo_show_type_clingo_show_type_csp as isize,
    Shown = clingo_show_type_clingo_show_type_shown as isize,
    Atoms = clingo_show_type_clingo_show_type_atoms as isize,
    Terms = clingo_show_type_clingo_show_type_terms as isize,
    Extra = clingo_show_type_clingo_show_type_extra as isize,
    All = clingo_show_type_clingo_show_type_all as isize,
    Complement = clingo_show_type_clingo_show_type_complement as isize,
}
/// Three-valued truth assignment, mirroring `clingo_truth_value_t`.
#[derive(Debug, Copy, Clone)]
pub enum TruthValue {
    Free = clingo_truth_value_clingo_truth_value_free as isize,
    True = clingo_truth_value_clingo_truth_value_true as isize,
    False = clingo_truth_value_clingo_truth_value_false as isize,
}
/// Kinds of statements in the abstract syntax tree, mirroring
/// `clingo_ast_statement_type_t`.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum AstStatementType {
    Rule = clingo_ast_statement_type_clingo_ast_statement_type_rule as isize,
    Const = clingo_ast_statement_type_clingo_ast_statement_type_const as isize,
    ShowSignature = clingo_ast_statement_type_clingo_ast_statement_type_show_signature as isize,
    ShowTerm = clingo_ast_statement_type_clingo_ast_statement_type_show_term as isize,
    Minimize = clingo_ast_statement_type_clingo_ast_statement_type_minimize as isize,
    Script = clingo_ast_statement_type_clingo_ast_statement_type_script as isize,
    Program = clingo_ast_statement_type_clingo_ast_statement_type_program as isize,
    External = clingo_ast_statement_type_clingo_ast_statement_type_external as isize,
    Edge = clingo_ast_statement_type_clingo_ast_statement_type_edge as isize,
    Heuristic = clingo_ast_statement_type_clingo_ast_statement_type_heuristic as isize,
    ProjectAtom = clingo_ast_statement_type_clingo_ast_statement_type_project_atom as isize,
    ProjectAtomSignature =
        clingo_ast_statement_type_clingo_ast_statement_type_project_atom_signature as isize,
    TheoryDefinition =
        clingo_ast_statement_type_clingo_ast_statement_type_theory_definition as isize,
}
/// Signs that can decorate an AST term, mirroring `clingo_ast_sign_t`.
#[derive(Debug, Copy, Clone)]
pub enum AstSign {
    None = clingo_ast_sign_clingo_ast_sign_none as isize,
    Negation = clingo_ast_sign_clingo_ast_sign_negation as isize,
    DoubleNegation = clingo_ast_sign_clingo_ast_sign_double_negation as isize,
}
/// Kinds of AST literals, mirroring `clingo_ast_literal_type_t`.
#[derive(Debug, Copy, Clone)]
pub enum AstLiteralType {
    Boolean = clingo_ast_literal_type_clingo_ast_literal_type_boolean as isize,
    Symbolic = clingo_ast_literal_type_clingo_ast_literal_type_symbolic as isize,
    Comparison = clingo_ast_literal_type_clingo_ast_literal_type_comparison as isize,
    CSP = clingo_ast_literal_type_clingo_ast_literal_type_csp as isize,
}
/// Kinds of AST body literals, mirroring `clingo_ast_body_literal_type_t`.
#[derive(Debug, Copy, Clone)]
pub enum AstBodyLiteralType {
    Literal = clingo_ast_body_literal_type_clingo_ast_body_literal_type_literal as isize,
    Conditional = clingo_ast_body_literal_type_clingo_ast_body_literal_type_conditional as isize,
    Aggregate = clingo_ast_body_literal_type_clingo_ast_body_literal_type_aggregate as isize,
    BodyAggregate =
        clingo_ast_body_literal_type_clingo_ast_body_literal_type_body_aggregate as isize,
    TheoryAtom = clingo_ast_body_literal_type_clingo_ast_body_literal_type_theory_atom as isize,
    Disjoint = clingo_ast_body_literal_type_clingo_ast_body_literal_type_disjoint as isize,
}
/// Clause categories, mirroring `clingo_clause_type_t`.
#[derive(Debug, Copy, Clone)]
pub enum ClauseType {
    Learnt = clingo_clause_type_clingo_clause_type_learnt as isize,
    Static = clingo_clause_type_clingo_clause_type_static as isize,
    Volatile = clingo_clause_type_clingo_clause_type_volatile as isize,
    VolatileStatic = clingo_clause_type_clingo_clause_type_volatile_static as isize,
}
/// Events reported during solving, mirroring `clingo_solve_event_type_t`.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum SolveEventType {
    Model = clingo_solve_event_type_clingo_solve_event_type_model as isize,
    Finish = clingo_solve_event_type_clingo_solve_event_type_finish as isize,
}
/// Node kinds in the statistics tree, mirroring `clingo_statistics_type_t`.
#[derive(Debug, Copy, Clone)]
pub enum StatisticsType {
    Empty = clingo_statistics_type_clingo_statistics_type_empty as isize,
    Value = clingo_statistics_type_clingo_statistics_type_value as isize,
    Array = clingo_statistics_type_clingo_statistics_type_array as isize,
    Map = clingo_statistics_type_clingo_statistics_type_map as isize,
}
/// Kinds of symbols, mirroring `clingo_symbol_type_t`.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum SymbolType {
    Infimum = clingo_symbol_type_clingo_symbol_type_infimum as isize,
    Number = clingo_symbol_type_clingo_symbol_type_number as isize,
    String = clingo_symbol_type_clingo_symbol_type_string as isize,
    Function = clingo_symbol_type_clingo_symbol_type_function as isize,
    Supremum = clingo_symbol_type_clingo_symbol_type_supremum as isize,
}
/// Warning codes passed to a [`Logger`], mirroring `clingo_warning_t`.
/// Note: the Rust variant `VariableUnbound` maps to the C constant named
/// `variable_unbounded`.
#[derive(Debug, Copy, Clone)]
pub enum Warning {
    OperationUndefined = clingo_warning_clingo_warning_operation_undefined as isize,
    RuntimeError = clingo_warning_clingo_warning_runtime_error as isize,
    AtomUndefined = clingo_warning_clingo_warning_atom_undefined as isize,
    FileIncluded = clingo_warning_clingo_warning_file_included as isize,
    VariableUnbound = clingo_warning_clingo_warning_variable_unbounded as isize,
    GlobalVariable = clingo_warning_clingo_warning_global_variable as isize,
    Other = clingo_warning_clingo_warning_other as isize,
}
/// Node kinds in the configuration tree, mirroring `clingo_configuration_type_t`.
#[derive(Debug, Copy, Clone)]
pub enum ConfigurationType {
    Value = clingo_configuration_type_clingo_configuration_type_value as isize,
    Array = clingo_configuration_type_clingo_configuration_type_array as isize,
    Map = clingo_configuration_type_clingo_configuration_type_map as isize,
}
/// C signature of the solve event callback registered with clingo.
type SolveEventCallback = unsafe extern "C" fn(
    type_: clingo_solve_event_type_t,
    event: *mut ::std::os::raw::c_void,
    data: *mut ::std::os::raw::c_void,
    goon: *mut bool,
) -> bool;
/// Handler for events reported during solving; `T` is the user-data type
/// threaded through the C callback.
pub trait SolveEventHandler<T> {
    /// Called for each solve event with a mutable continuation flag `goon`;
    /// the boolean result is returned to clingo unchanged.
    fn on_solve_event(type_: SolveEventType, data: &mut T, goon: &mut bool) -> bool;
    /// FFI trampoline: decodes the raw event type, rebuilds references from
    /// the raw pointers and dispatches to `on_solve_event`.
    /// Panics on null pointers or an unknown event type.
    #[doc(hidden)]
    unsafe extern "C" fn unsafe_solve_callback(
        type_: clingo_solve_event_type_t,
        event: *mut ::std::os::raw::c_void,
        data_: *mut ::std::os::raw::c_void,
        goon_: *mut bool,
    ) -> bool {
        // TODO assert!(!event.is_null());
        assert!(!data_.is_null());
        assert!(!goon_.is_null());
        let event_type = match type_ {
            clingo_solve_event_type_clingo_solve_event_type_model => SolveEventType::Model,
            clingo_solve_event_type_clingo_solve_event_type_finish => SolveEventType::Finish,
            _ => panic!("Rust binding failed to match clingo solve event type"),
        };
        // non-null was asserted above; the caller must pass a valid `T`
        let data = (data_ as *mut T).as_mut().unwrap();
        let goon = goon_.as_mut().unwrap();
        Self::on_solve_event(event_type, data, goon)
    }
}
/// C signature of the per-statement callback used by `clingo_parse_program`.
type AstCallback =
    unsafe extern "C" fn(arg1: *const clingo_ast_statement_t, arg2: *mut ::std::os::raw::c_void)
        -> bool;
/// Handler invoked once per statement parsed from a program; `T` is the
/// user-data type threaded through the C callback.
pub trait AstStatementHandler<T> {
    /// Called for each parsed statement; the boolean result is returned to
    /// clingo unchanged.
    fn on_statement(arg1: &AstStatement, arg2: &mut T) -> bool;
    /// FFI trampoline: converts the raw pointers into references and
    /// dispatches to `on_statement`. Panics on null pointers.
    // NOTE(review): the cast assumes `AstStatement` is layout-compatible with
    // `clingo_ast_statement_t` — confirm it is a plain newtype wrapper.
    #[doc(hidden)]
    unsafe extern "C" fn unsafe_ast_callback(
        stm_: *const clingo_ast_statement_t,
        data_: *mut ::std::os::raw::c_void,
    ) -> bool {
        assert!(!stm_.is_null());
        assert!(!data_.is_null());
        let stm = (stm_ as *const AstStatement).as_ref().unwrap();
        let data = (data_ as *mut T).as_mut().unwrap();
        Self::on_statement(stm, data)
    }
}
/// C signature of the message-logging callback.
type LoggingCallback = unsafe extern "C" fn(
    code: clingo_warning_t,
    message: *const ::std::os::raw::c_char,
    data: *mut ::std::os::raw::c_void,
);
/// Receiver for warning/info messages emitted by clingo; `T` is the
/// user-data type threaded through the C callback.
pub trait Logger<T> {
    /// Called for each message emitted by clingo.
    fn log(code: Warning, message: &str, data: &mut T);
    /// FFI trampoline: maps the raw warning code to [`Warning`], decodes the
    /// C message string and dispatches to `log`.
    /// Panics on null pointers, unknown codes, or non-UTF-8 messages.
    #[doc(hidden)]
    unsafe extern "C" fn unsafe_logging_callback(
        code_: clingo_warning_t,
        message_: *const ::std::os::raw::c_char,
        data_: *mut ::std::os::raw::c_void,
    ) {
        assert!(!message_.is_null());
        assert!(!data_.is_null());
        let warning = match code_ as u32 {
            clingo_warning_clingo_warning_atom_undefined => Warning::AtomUndefined,
            clingo_warning_clingo_warning_file_included => Warning::FileIncluded,
            clingo_warning_clingo_warning_global_variable => Warning::GlobalVariable,
            clingo_warning_clingo_warning_operation_undefined => Warning::OperationUndefined,
            clingo_warning_clingo_warning_other => Warning::Other,
            clingo_warning_clingo_warning_runtime_error => Warning::RuntimeError,
            clingo_warning_clingo_warning_variable_unbounded => Warning::VariableUnbound,
            _ => panic!("Rust binding failed to match clingo warning"),
        };
        // panics if the message is not valid UTF-8
        let c_str = CStr::from_ptr(message_);
        let message = c_str.to_str().unwrap();
        let data = (data_ as *mut T).as_mut().unwrap();
        Self::log(warning, message, data)
    }
}
/// C signature of the grounding callback used to implement external functions.
type GroundCallback = unsafe extern "C" fn(
    location: *const clingo_location_t,
    name: *const ::std::os::raw::c_char,
    arguments: *const clingo_symbol_t,
    arguments_size: usize,
    data: *mut ::std::os::raw::c_void,
    symbol_callback: clingo_symbol_callback_t,
    symbol_callback_data: *mut ::std::os::raw::c_void,
) -> bool;
/// Handler invoked while grounding; `T` is the user-data type threaded
/// through the C callback. Result symbols can be reported via the still-raw
/// `symbol_callback`/`symbol_callback_data` pair.
pub trait GroundEventHandler<T> {
    /// Called during grounding; the boolean result is returned to clingo
    /// unchanged.
    fn on_ground_event(
        location: &Location,
        name: &str,
        arguments: &[Symbol],
        data: &mut T,
        symbol_callback: clingo_symbol_callback_t,
        symbol_callback_data: *mut ::std::os::raw::c_void,
    ) -> bool;
    /// FFI trampoline: rebuilds references and slices from the raw pointers
    /// and dispatches to `on_ground_event`. Panics on null pointers or a
    /// non-UTF-8 name.
    #[doc(hidden)]
    unsafe extern "C" fn unsafe_ground_callback(
        location_: *const clingo_location_t,
        name_: *const ::std::os::raw::c_char,
        arguments_: *const clingo_symbol_t,
        arguments_size: usize,
        data_: *mut ::std::os::raw::c_void,
        symbol_callback: clingo_symbol_callback_t,
        symbol_callback_data: *mut ::std::os::raw::c_void,
        //TODO wrap symbol call back
    ) -> bool {
        assert!(!location_.is_null());
        assert!(!name_.is_null());
        assert!(!arguments_.is_null());
        assert!(!data_.is_null());
        assert!(!symbol_callback_data.is_null());
        let location = (location_ as *const Location).as_ref().unwrap();
        let c_str = CStr::from_ptr(name_);
        let name = c_str.to_str().unwrap();
        // `Symbol` is a newtype over `clingo_symbol_t`, so the C array can be
        // viewed as a &[Symbol] of the reported length
        let arguments = std::slice::from_raw_parts(arguments_ as *const Symbol, arguments_size);
        let data = (data_ as *mut T).as_mut().unwrap();
        Self::on_ground_event(
            location,
            name,
            arguments,
            data,
            symbol_callback,
            symbol_callback_data,
        )
    }
}
/// Newtype wrapper around `clingo_symbolic_literal_t`; used for the
/// assumptions passed to [`Control::solve`](struct.Control.html#method.solve).
#[derive(Debug, Copy, Clone)]
pub struct SymbolicLiteral(clingo_symbolic_literal_t);
/// A solver literal; newtype wrapper around `clingo_literal_t`.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct Literal(clingo_literal_t);
impl Literal {
    /// Return the literal with its sign flipped.
    pub fn negate(&self) -> Literal {
        let Literal(raw) = *self;
        Literal(-raw)
    }
    /// Reinterpret an atom id as a literal without any validity check.
    pub fn UNSAFE_from(Atom(atom): Atom) -> Literal {
        Literal(atom as clingo_literal_t)
    }
    /// Expose the underlying integer representation.
    pub fn get_integer(&self) -> i32 {
        let Literal(raw) = *self;
        raw
    }
}
/// NOTE(review): despite its name this wraps `clingo_literal_t`, not a
/// weighted-literal struct — confirm whether `clingo_weighted_literal_t`
/// was intended.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct WeightedLiteral(clingo_literal_t);
//TODO check impl WeightedLiteral {
//     pub fn negate(&self) -> Literal {
//         Literal(-(self.0))
//     }
//     pub fn UNSAFE_from(Atom(atom): Atom) -> Literal {
//         Literal(atom as clingo_literal_t)
//     }
//     pub fn get_integer(&self) -> i32 {
//         self.0
//     }
// }
/// Newtype wrapper around `clingo_atom_t`.
#[derive(Debug, Copy, Clone)]
pub struct Atom(clingo_atom_t);
/// A numeric identifier; newtype wrapper around `clingo_id_t`.
#[derive(Debug, Copy, Clone)]
pub struct Id(clingo_id_t);
impl Id {
    /// Expose the underlying integer value.
    pub fn get_integer(&self) -> u32 {
        let Id(value) = *self;
        value
    }
}
/// A source-code position (begin/end line, column and file); newtype wrapper
/// around `clingo_location`. See `create_location` for its field layout.
#[derive(Debug, Copy, Clone)]
pub struct Location(clingo_location);
/// A clingo symbol (number, string, function, `#inf` or `#sup`, see
/// [`SymbolType`]); newtype wrapper around `clingo_symbol_t`.
#[derive(Debug, Clone)]
pub struct Symbol(clingo_symbol_t);
impl PartialEq for Symbol {
    // Equality is delegated to the clingo library instead of comparing the
    // raw representation directly.
    fn eq(&self, other: &Symbol) -> bool {
        unsafe { clingo_symbol_is_equal_to(self.0, other.0) }
    }
}
impl Eq for Symbol {}
/// Construct a symbol representing a number.
// TODO replace c_int with u32 ?
pub fn create_number(number: c_int) -> Symbol {
    let mut raw: clingo_symbol_t = 0;
    unsafe { clingo_symbol_create_number(number, &mut raw) };
    Symbol(raw)
}
/// Construct a symbol representing \#sup.
pub fn create_supremum() -> Symbol {
    let mut raw: clingo_symbol_t = 0;
    unsafe { clingo_symbol_create_supremum(&mut raw) };
    Symbol(raw)
}
/// Construct a symbol representing \#inf
pub fn create_infimum() -> Symbol {
    let mut raw: clingo_symbol_t = 0;
    unsafe { clingo_symbol_create_infimum(&mut raw) };
    Symbol(raw)
}
/// Construct a symbol representing a string.
///
/// # Errors:
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
/// - a static message if `string` contains an interior NUL byte
pub fn create_string(string: &str) -> Result<Symbol, &'static str> {
    let mut symbol = 0 as clingo_symbol_t;
    // An interior NUL byte cannot be represented in a C string; report it as
    // an error instead of panicking (the previous code unwrap()ed here).
    let c_str = match CString::new(string) {
        Ok(s) => s,
        Err(_) => return Err("interior nul byte found in string"),
    };
    if unsafe { clingo_symbol_create_string(c_str.as_ptr(), &mut symbol) } {
        Ok(Symbol(symbol))
    } else {
        Err(error_message())
    }
}
/// Construct a symbol representing an id.
///
/// **Note:** This is just a shortcut for `create_function()` with
/// empty arguments.
///
/// # Arguments
///
/// * `name` - the name of the symbol
/// * `positive` - whether the symbol has a classical negation sign
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn create_id(name: &str, positive: bool) -> Result<Symbol, &'static str> {
    // zero-terminated copy of the name; panics on an interior NUL byte
    let name_c_str = CString::new(name).unwrap();
    let mut raw: clingo_symbol_t = 0;
    let ok = unsafe { clingo_symbol_create_id(name_c_str.as_ptr(), positive, &mut raw) };
    if ok {
        Ok(Symbol(raw))
    } else {
        Err(error_message())
    }
}
/// Construct a symbol representing a function or tuple.
///
///
/// **Note:** To create tuples, the empty string has to be used as name.
///
/// # Arguments
///
/// * `name` - the name of the function
/// * `arguments` - the arguments of the function
/// * `positive` - whether the symbol has a classical negation sign
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn create_function(
    name: &str,
    arguments: &[Symbol],
    positive: bool,
) -> Result<Symbol, &'static str> {
    let mut symbol = 0 as clingo_symbol_t;
    // panics if `name` contains an interior NUL byte
    let name_c_str = CString::new(name).unwrap();
    // `Symbol` is a newtype over `clingo_symbol_t`, so the slice can be
    // reinterpreted as a C array; its length is passed alongside
    if unsafe {
        clingo_symbol_create_function(
            name_c_str.as_ptr(),
            arguments.as_ptr() as *const clingo_symbol_t,
            arguments.len(),
            positive,
            &mut symbol,
        )
    } {
        Ok(Symbol(symbol))
    } else {
        Err(error_message())
    }
}
impl Symbol {
    /// Get the number of a symbol.
    ///
    /// # Errors
    ///
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if symbol is not of type `SymbolType::Number`
    pub fn number(&self) -> Result<i32, &'static str> {
        let mut number = 0;
        if unsafe { clingo_symbol_number(self.0, &mut number) } {
            Ok(number)
        } else {
            Err(error_message())
        }
    }
    /// Get the name of a symbol.
    ///
    /// # Errors
    ///
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if symbol is not of type `SymbolType::Function`
    pub fn name(&self) -> Result<&str, &'static str> {
        let mut char_ptr = std::ptr::null() as *const c_char;
        if unsafe { clingo_symbol_name(self.0, &mut char_ptr) } {
            // NOTE(review): the pointee is owned by clingo; the elided
            // lifetime ties the &str to &self, which assumes the buffer stays
            // valid at least that long — confirm against the C API docs.
            let c_str = unsafe { CStr::from_ptr(char_ptr) };
            Ok(c_str.to_str().unwrap())
        } else {
            Err(error_message())
        }
    }
    /// Get the string of a symbol.
    ///
    /// # Errors
    ///
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if symbol is not of type `SymbolType::String`
    pub fn string(&self) -> Result<&str, &'static str> {
        let mut char_ptr = std::ptr::null() as *const c_char;
        if unsafe { clingo_symbol_string(self.0, &mut char_ptr) } {
            // see the lifetime note on `name()` above
            let c_str = unsafe { CStr::from_ptr(char_ptr) };
            Ok(c_str.to_str().unwrap())
        } else {
            Err(error_message())
        }
    }
    /// Check if a function is positive (does not have a sign).
    ///
    /// # Errors
    ///
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if symbol is not of type `SymbolType::Function`
    pub fn is_positive(&self) -> Result<bool, &'static str> {
        let mut positive = false;
        if unsafe { clingo_symbol_is_positive(self.0, &mut positive) } {
            Ok(positive)
        } else {
            Err(error_message())
        }
    }
    /// Check if a function is negative (has a sign).
    ///
    /// # Errors
    ///
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if symbol is not of type `SymbolType::Function`
    pub fn is_negative(&self) -> Result<bool, &'static str> {
        let mut negative = false;
        if unsafe { clingo_symbol_is_negative(self.0, &mut negative) } {
            Ok(negative)
        } else {
            Err(error_message())
        }
    }
    /// Get the arguments of a symbol.
    ///
    /// # Errors
    ///
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if symbol is not of type `SymbolType::Function`
    pub fn arguments(&self) -> Result<Vec<Symbol>, &'static str> {
        let mut symbol_ptr = std::ptr::null() as *const clingo_symbol_t;
        let mut size: usize = 0;
        if unsafe { clingo_symbol_arguments(self.0, &mut symbol_ptr, &mut size) } {
            // copy the C array element by element into owned Symbols
            let mut symbols = Vec::<Symbol>::with_capacity(size);
            for _ in 0..size {
                let nsymbol = unsafe { *symbol_ptr };
                symbols.push(Symbol(nsymbol));
                symbol_ptr = unsafe { symbol_ptr.offset(1) };
            }
            Ok(symbols)
        } else {
            Err(error_message())
        }
    }
    /// Get the type of a symbol.
    ///
    /// # Errors
    ///
    /// - may failed to match clingo symbol type
    //TODO maybe unnecesary function in Rust API?
    pub fn get_type(&self) -> Result<SymbolType, &'static str> {
        let stype = unsafe { clingo_symbol_type(self.0) } as u32;
        match stype {
            clingo_symbol_type_clingo_symbol_type_infimum => Ok(SymbolType::Infimum),
            clingo_symbol_type_clingo_symbol_type_number => Ok(SymbolType::Number),
            clingo_symbol_type_clingo_symbol_type_string => Ok(SymbolType::String),
            clingo_symbol_type_clingo_symbol_type_function => Ok(SymbolType::Function),
            clingo_symbol_type_clingo_symbol_type_supremum => Ok(SymbolType::Supremum),
            _ => Err("Rust binding failed to match clingo symbol type"),
        }
    }
    /// Get the string representation of a symbol.
    ///
    /// Returns `None` if either FFI call reports failure or the result is
    /// not valid UTF-8.
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    ///
    pub fn to_string(&self) -> Option<String> {
        let mut size: usize = 0;
        // `err` actually holds the FFI *success* flag, despite its name
        let err = unsafe { clingo_symbol_to_string_size(self.0, &mut size) };
        if !err {
            None
        } else {
            // buffer of `size` non-zero bytes; CString appends its own NUL
            let a1 = vec![1; size];
            let cstring = unsafe { CString::from_vec_unchecked(a1) };
            // NOTE(review): if the reported size includes clingo's NUL
            // terminator, the resulting String may keep a trailing '\0' —
            // verify against clingo_symbol_to_string_size semantics.
            let err =
                unsafe { clingo_symbol_to_string(self.0, cstring.as_ptr() as *mut c_char, size) };
            if !err {
                None
            } else {
                cstring.into_string().ok()
            }
        }
    }
    /// Check if a symbol is less than another symbol.
    ///
    /// Symbols are first compared by type. If the types are equal, the values are
    /// compared (where strings are compared using strcmp). Functions are first
    /// compared by signature and then lexicographically by arguments.
    ///
    /// # Arguments
    ///
    /// * `a` - first symbol
    /// * `b` - second symbol
    ///
    /// **Returns** whether a < b
    pub fn is_less_than(&self, other: &Symbol) -> bool {
        unsafe { clingo_symbol_is_less_than(self.0, other.0) }
    }
    /// Calculate a hash code of a symbol.
    pub fn hash(&self) -> usize {
        unsafe { clingo_symbol_hash(self.0) }
    }
}
// struct MaLogger;
// impl Logger<u32> for MaLogger {
//
// fn log(code: Warning, message: &str, data: &mut u32){
// println!("log: {}",message);
// println!("warn: {:?}",code);
// }
// }
/// Parse the given program and return an abstract syntax tree for each statement via a callback.
///
/// # Arguments
///
/// * `program` - the program in gringo syntax
/// * `callback` - the callback reporting statements
/// * `callback_data` - user data for the callback
///
/// # Errors
///
/// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if parsing fails
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn parse_program<D, T: AstStatementHandler<D>>(
    program_: &str,
    _callback: &T,
    callback_data: &mut D,
) -> Result<(), &'static str> {
    // no logger is installed; the trailing 0 is the message limit
    let logger = None;
    // let logger = Some(MaLogger::unsafe_logging_callback as LoggingCallback);
    let logger_data = std::ptr::null_mut();
    // panics if the program text contains an interior NUL byte
    let program = CString::new(program_).unwrap();
    let data = callback_data as *mut D;
    if unsafe {
        clingo_parse_program(
            program.as_ptr(),
            Some(T::unsafe_ast_callback as AstCallback),
            data as *mut ::std::os::raw::c_void,
            logger,
            logger_data,
            0,
        )
    } {
        Ok(())
    } else {
        Err(error_message())
    }
}
/// Parse the given program and return an abstract syntax tree for each statement via a callback.
///
/// # Arguments
///
/// * `program` - the program in gringo syntax
/// * `callback` - the callback reporting statements
/// * `callback_data` - user data for the callback
/// * `logger` - callback to report messages during parsing
/// * `logger_data` - user data for the logger
/// * `message_limit` - the maximum number of times the logger is called
///
/// # Errors
///
/// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if parsing fails
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn parse_program_with_logger<CD, C: AstStatementHandler<CD>, LD, L: Logger<LD>>(
    program_: &str,
    _callback: &C,
    cdata_: &mut CD,
    _logger: &L,
    ldata_: &mut LD,
    message_limit: u32,
) -> Result<(), &'static str> {
    // both user-data pointers are threaded through the FFI as void*
    let callback_data = cdata_ as *mut CD;
    let logger_data = ldata_ as *mut LD;
    // panics if the program text contains an interior NUL byte
    let program = CString::new(program_).unwrap();
    if unsafe {
        clingo_parse_program(
            program.as_ptr(),
            Some(C::unsafe_ast_callback as AstCallback),
            callback_data as *mut ::std::os::raw::c_void,
            Some(L::unsafe_logging_callback as LoggingCallback),
            logger_data as *mut ::std::os::raw::c_void,
            message_limit,
        )
    } {
        Ok(())
    } else {
        Err(error_message())
    }
}
pub fn create_location(
begin_line: usize,
end_line: usize,
begin_column: usize,
end_column: usize,
begin_file_: &str,
end_file_: &str,
) -> Location {
let begin_file = CString::new(begin_file_).unwrap();
let end_file = CString::new(end_file_).unwrap();
let loc = clingo_location {
begin_line: begin_line,
end_line: end_line,
begin_column: begin_column,
end_column: end_column,
begin_file: begin_file.as_ptr(),
end_file: end_file.as_ptr(),
};
Location(loc)
}
/// Obtain the clingo version.
///
/// `(major version, minor version, revision number)`
pub fn version() -> (i32, i32, i32) {
    let (mut major, mut minor, mut revision) = (0, 0, 0);
    unsafe { clingo_version(&mut major, &mut minor, &mut revision) };
    (major, minor, revision)
}
/// A named program part plus its parameter symbols, as passed to
/// [`Control::ground`](struct.Control.html#method.ground).
pub struct Part<'a> {
    // owned zero-terminated copy of the part name
    name: CString,
    // parameter symbols borrowed from the caller
    params: &'a [Symbol],
}
impl<'a> Part<'a> {
    /// Create a part from a name and a slice of parameter symbols.
    /// Panics if `name` contains an interior NUL byte.
    pub fn new(name: &str, params: &'a [Symbol]) -> Part<'a> {
        Part {
            name: CString::new(name).unwrap(),
            params: params,
        }
    }
    /// Build the C-level descriptor. The returned struct borrows `self`'s
    /// buffers via raw pointers, so `self` must outlive every use of it.
    fn from(&self) -> clingo_part {
        clingo_part {
            name: self.name.as_ptr(),
            params: self.params.as_ptr() as *const clingo_symbol_t,
            size: self.params.len(),
        }
    }
}
/// Get the last error code set by a clingo API call.
///
/// **Note:** Each thread has its own local error code.
///
/// Panics if clingo reports a code outside the known enum range.
pub fn error() -> Error {
    let code = unsafe { clingo_error_code() };
    match code as u32 {
        clingo_error_clingo_error_success => Error::Success,
        clingo_error_clingo_error_runtime => Error::Runtime,
        clingo_error_clingo_error_logic => Error::Logic,
        clingo_error_clingo_error_bad_alloc => Error::BadAlloc,
        clingo_error_clingo_error_unknown => Error::Unknown,
        _ => panic!("Rust binding failed to match clingo error"),
    }
}
/// Get the last error message set if an API call fails.
///
/// **Note:** Each thread has its own local error message. Yields an empty
/// string when no message is set; panics on a non-UTF-8 message.
pub fn error_message() -> &'static str {
    let char_ptr: *const c_char = unsafe { clingo_error_message() };
    if char_ptr.is_null() {
        return "";
    }
    let c_str = unsafe { CStr::from_ptr(char_ptr) };
    c_str.to_str().unwrap()
}
/// Set a custom error code and message in the active thread.
///
/// Panics if `message` contains an interior NUL byte.
pub fn set_error(code: Error, message: &str) {
    let message_c_str = CString::new(message).unwrap();
    unsafe { clingo_set_error(code as clingo_error_t, message_c_str.as_ptr()) }
}
/// Table of propagation callbacks; newtype wrapper around `clingo_propagator_t`.
pub struct Propagator(clingo_propagator_t);
/// Factory for [`Propagator`] callback tables; `T` is the propagator state
/// threaded through the C callbacks. All four hooks default to no-ops that
/// return `true` (success).
pub trait PropagatorBuilder<T> {
    /// Initialization hook; default does nothing and reports success.
    fn init(_init: &mut PropagateInit, _data: &mut T) -> bool {
        true
    }
    /// Propagation hook, given the changed literals; default is a no-op.
    fn propagate(_control: &mut PropagateControl, _changes: &[Literal], _data: &mut T) -> bool {
        true
    }
    /// Undo hook, given the literals being unassigned; default is a no-op.
    fn undo(_control: &mut PropagateControl, _changes: &[Literal], _data: &mut T) -> bool {
        true
    }
    /// Check hook; default is a no-op.
    fn check(_control: &mut PropagateControl, _data: &mut T) -> bool {
        true
    }
    /// Get a Propagator
    fn new() -> Propagator {
        let prop = clingo_propagator_t {
            init: Some(Self::unsafe_init),
            propagate: Some(Self::unsafe_propagate),
            undo: Some(Self::unsafe_undo),
            check: Some(Self::unsafe_check),
        };
        Propagator(prop)
    }
    /// FFI trampoline for `init`; panics on null pointers.
    #[doc(hidden)]
    unsafe extern "C" fn unsafe_init(
        init_: *mut clingo_propagate_init_t,
        data: *mut ::std::os::raw::c_void,
    ) -> bool {
        assert!(!init_.is_null());
        assert!(!data.is_null());
        let init = (init_ as *mut PropagateInit).as_mut().unwrap();
        let propagator = (data as *mut T).as_mut().unwrap();
        Self::init(init, propagator)
    }
    /// FFI trampoline for `propagate`; rebuilds the changes slice from the
    /// raw pointer/length pair. Panics on null pointers.
    #[doc(hidden)]
    unsafe extern "C" fn unsafe_propagate(
        control_: *mut clingo_propagate_control_t,
        changes_: *const clingo_literal_t,
        size: usize,
        data: *mut ::std::os::raw::c_void,
    ) -> bool {
        assert!(!control_.is_null());
        assert!(!changes_.is_null());
        assert!(!data.is_null());
        let control = (control_ as *mut PropagateControl).as_mut().unwrap();
        let changes = std::slice::from_raw_parts(changes_ as *const Literal, size);
        let propagator = (data as *mut T).as_mut().unwrap();
        Self::propagate(control, changes, propagator)
    }
    /// FFI trampoline for `undo`; panics on null pointers.
    #[doc(hidden)]
    unsafe extern "C" fn unsafe_undo(
        control_: *mut clingo_propagate_control_t,
        changes_: *const clingo_literal_t,
        size: usize,
        data: *mut ::std::os::raw::c_void,
    ) -> bool {
        assert!(!control_.is_null());
        assert!(!changes_.is_null());
        assert!(!data.is_null());
        let control = (control_ as *mut PropagateControl).as_mut().unwrap();
        let changes = std::slice::from_raw_parts(changes_ as *const Literal, size);
        let propagator = (data as *mut T).as_mut().unwrap();
        Self::undo(control, changes, propagator)
    }
    /// FFI trampoline for `check`; panics on null pointers.
    #[doc(hidden)]
    unsafe extern "C" fn unsafe_check(
        control_: *mut clingo_propagate_control_t,
        data: *mut ::std::os::raw::c_void,
    ) -> bool {
        assert!(!control_.is_null());
        assert!(!data.is_null());
        let control = (control_ as *mut PropagateControl).as_mut().unwrap();
        let propagator = (data as *mut T).as_mut().unwrap();
        Self::check(control, propagator)
    }
}
/// Owner of a clingo control object used for grounding and solving; the
/// underlying handle is freed in `Drop`.
#[derive(Debug)]
pub struct Control {
    // non-null pointer to the underlying clingo control object
    ctl: Unique<clingo_control_t>,
}
impl Drop for Control {
    // Release the underlying clingo control object when the wrapper is
    // dropped.
    fn drop(&mut self) {
        // println!("drop Control");
        unsafe { clingo_control_free(self.ctl.as_ptr()) }
    }
}
impl Control {
/// Create a new control object.
///
/// **Note:** Only gringo options (without `--output`) and clasp`s options are supported as
/// arguments, except basic options such as `--help`.
/// Furthermore, a control object is blocked while a search call is active;
/// you must not call any member function during search.
///
/// Messages are printed to stderr.
///
/// # Arguments
///
/// * `arguments` - string array of command line arguments
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
/// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if argument parsing fails
pub fn new(
arguments: std::vec::Vec<String>,
message_limit: u32,
) -> Result<Control, &'static str> {
let logger = None;
let logger_data = std::ptr::null_mut();
// create a vector of zero terminated strings
let mut args: Vec<CString> = Vec::new();
for arg in arguments {
args.push(CString::new(arg).unwrap());
}
// convert the strings to raw pointers
let c_args = args.iter()
.map(|arg| arg.as_ptr())
.collect::<Vec<*const c_char>>();
let mut ctl = unsafe { mem::uninitialized() };
if unsafe {
clingo_control_new(
c_args.as_ptr(),
c_args.len(),
logger,
logger_data,
message_limit,
&mut ctl,
)
} {
Ok(Control {
ctl: Unique::new(ctl).unwrap(),
})
} else {
Err(error_message())
}
}
/// Create a new control object.
///
/// **Note:** Only gringo options (without <code>\-\-output</code>) and clasp`s options are supported as arguments,
/// except basic options such as <code>\-\-help</code>.
/// Furthermore, a control object is blocked while a search call is active;
/// you must not call any member function during search.
///
/// # Arguments
///
/// * `arguments` - C string array of command line arguments
/// * `logger` - callback functions for warnings and info messages
/// * `logger_data` - user data for the logger callback
/// * `message_limit` - maximum number of times the logger callback is called
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
/// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if argument parsing fails
pub fn new_with_logger<D, T: Logger<D>>(
arguments: std::vec::Vec<String>,
_logger: &T,
logger_data: &mut D,
message_limit: u32,
) -> Result<Control, &'static str> {
// create a vector of zero terminated strings
let mut args: Vec<CString> = Vec::new();
for arg in arguments {
args.push(CString::new(arg).unwrap());
}
// convert the strings to raw pointers
let c_args = args.iter()
.map(|arg| arg.as_ptr())
.collect::<Vec<*const c_char>>();
let mut ctl = unsafe { mem::uninitialized() };
let data = logger_data as *mut D;
if unsafe {
clingo_control_new(
c_args.as_ptr(),
c_args.len(),
Some(T::unsafe_logging_callback as LoggingCallback),
data as *mut ::std::os::raw::c_void,
message_limit,
&mut ctl,
)
} {
Ok(Control {
ctl: Unique::new(ctl).unwrap(),
})
} else {
Err(error_message())
}
}
//TODO pub fn clingo_control_load(control: *mut Control, file: *const c_char) -> bool;
/// Extend the logic program with the given non-ground logic program in string form.
///
/// This function puts the given program into a block of form: `#program name(parameters).`
///
/// After extending the logic program, the corresponding program parts are typically grounded
/// with `ground()`.
///
/// # Arguments
///
/// * `name` name of the program block
/// * `parameters` string array of parameters of the program block
/// * `program` string representation of the program
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
/// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if parsing fails
pub fn add(
&mut self,
name_: &str,
parameters: Vec<&str>,
program_: &str,
) -> Result<(), &'static str> {
let name = CString::new(name_).unwrap();
let name_ptr = name.as_ptr();
let program = CString::new(program_).unwrap();
let program_ptr = program.as_ptr();
let parameters_size = parameters.len();
// create a vector of zero terminated strings
let l_parameters = parameters
.into_iter()
.map(|arg| CString::new(arg).unwrap())
.collect::<Vec<CString>>();
// convert the strings to raw pointers
let c_parameters = l_parameters
.iter()
.map(|arg| arg.as_ptr())
.collect::<Vec<*const c_char>>();
if unsafe {
clingo_control_add(
self.ctl.as_ptr(),
name_ptr,
c_parameters.as_ptr(),
parameters_size,
program_ptr,
)
} {
Ok(())
} else {
Err(error_message())
}
}
/// Ground the selected [`Part`](struct.Part.html) parts of the current (non-ground) logic program.
///
/// After grounding, logic programs can be solved with `solve()`.
///
/// **Note:** Parts of a logic program without an explicit <tt>\#program</tt>
/// specification are by default put into a program called `base` - without
/// arguments.
///
/// # Arguments
///
/// * `parts` array of parts to ground
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
///
/// **See:** [`Part`](struct.Part.html)
pub fn ground(&mut self, sparts: &[Part]) -> Result<(), &'static str> {
let parts = sparts
.iter()
.map(|arg| arg.from())
.collect::<Vec<clingo_part>>();
let parts_size = sparts.len();
if unsafe {
clingo_control_ground(
self.ctl.as_ptr(),
parts.as_ptr(),
parts_size,
None,
std::ptr::null_mut() as *mut ::std::os::raw::c_void,
)
} {
Ok(())
} else {
Err(error_message())
}
}
/// Ground the selected [`Part`](struct.Part.html) parts of the current (non-ground) logic
/// program.
///
/// After grounding, logic programs can be solved with `solve()`.
///
/// **Note:** Parts of a logic program without an explicit <tt>\#program</tt>
/// specification are by default put into a program called `base` - without
/// arguments.
///
/// # Arguments
///
/// * `parts` array of parts to ground
/// * `ground_callback` callback to implement external functions
/// * `ground_callback_data` user data for ground_callback
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
/// - error code of ground callback
///
/// **See:** [`Part`](struct.Part.html)
    pub fn ground_with_event_handler<D, T: GroundEventHandler<D>>(
        &mut self,
        sparts: &[Part],
        _ground_callback: &T,
        ground_callback_data: &mut D,
    ) -> Result<(), &'static str> {
        // build the C-level part descriptors; they borrow from `sparts`
        let parts = sparts
            .iter()
            .map(|arg| arg.from())
            .collect::<Vec<clingo_part>>();
        let parts_size = sparts.len();
        // user data is threaded through the FFI as a void*
        let data = ground_callback_data as *mut D;
        if unsafe {
            clingo_control_ground(
                self.ctl.as_ptr(),
                parts.as_ptr(),
                parts_size,
                Some(T::unsafe_ground_callback as GroundCallback),
                data as *mut ::std::os::raw::c_void,
            )
        } {
            Ok(())
        } else {
            Err(error_message())
        }
    }
/// Solve the currently [`ground()`](struct.Control.html#method.ground) grounded logic program enumerating its models.
///
/// See the [`SolveHandle`](struct.SolveHandle.html) module for more information.
///
/// # Arguments
///
/// * `mode` - configures the search mode
/// * `assumptions` - array of assumptions to solve under
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
/// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if solving could not be started
pub fn solve(
    &mut self,
    mode: SolveMode,
    assumptions: &[SymbolicLiteral],
) -> Result<&mut SolveHandle, &'static str> {
    // clingo fills this out parameter with the handle on success.
    let mut handle: *mut clingo_solve_handle_t = std::ptr::null_mut();
    let started = unsafe {
        clingo_control_solve(
            self.ctl.as_ptr(),
            mode as clingo_solve_mode_bitset_t,
            assumptions.as_ptr() as *const clingo_symbolic_literal_t,
            assumptions.len(),
            // No event handler registered for this variant.
            None,
            std::ptr::null_mut() as *mut ::std::os::raw::c_void,
            &mut handle,
        )
    };
    if !started {
        return Err(error_message());
    }
    unsafe { (handle as *mut SolveHandle).as_mut() }
        .ok_or("Rust binding failed to dereference pointer to clingo solve handle")
}
/// Solve the currently [`ground()`](struct.Control.html#method.ground) grounded logic program
/// enumerating its models.
///
/// See the [`SolveHandle`](struct.SolveHandle.html) module for more information.
///
/// # Arguments
///
/// * `mode` - configures the search mode (raw clingo bitset)
/// * `assumptions` - array of assumptions to solve under
/// * `_notify` - the event handler to register (only its type is used)
/// * `data_` - the user data for the event handler
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
/// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if solving could not be started
pub fn solve_with_event_handler<D, T: SolveEventHandler<D>>(
    &mut self,
    mode: clingo_solve_mode_bitset_t,
    assumptions: &[SymbolicLiteral],
    _notify: &T,
    data_: &mut D,
) -> Result<&mut SolveHandle, &'static str> {
    // clingo fills this out parameter with the handle on success.
    let mut handle: *mut clingo_solve_handle_t = std::ptr::null_mut();
    let data_ptr = data_ as *mut D;
    let started = unsafe {
        clingo_control_solve(
            self.ctl.as_ptr(),
            mode,
            assumptions.as_ptr() as *const clingo_symbolic_literal_t,
            assumptions.len(),
            // The trampoline is resolved statically from the handler type T.
            Some(T::unsafe_solve_callback as SolveEventCallback),
            data_ptr as *mut ::std::os::raw::c_void,
            &mut handle,
        )
    };
    if !started {
        return Err(error_message());
    }
    unsafe { (handle as *mut SolveHandle).as_mut() }
        .ok_or("Rust binding failed to dereference pointer to clingo solve handle")
}
/// Clean up the domains of clingo`s grounding component using the solving
/// component`s top level assignment.
///
/// This function removes atoms from domains that are false and marks atoms as
/// facts that are true. With multi-shot solving, this can result in smaller
/// groundings because less rules have to be instantiated and more
/// simplifications can be applied.
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn cleanup(&mut self) -> Result<(), &'static str> {
    match unsafe { clingo_control_cleanup(self.ctl.as_ptr()) } {
        true => Ok(()),
        false => Err(error_message()),
    }
}
/// Assign a truth value to an external atom.
///
/// If the atom does not exist or is not external, this is a noop.
///
/// # Arguments
///
/// * `symbol` - the atom to assign
/// * `value` - the truth value
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn assign_external(
    &mut self,
    symbol: &Symbol,
    value: TruthValue,
) -> Result<(), &'static str> {
    let success = unsafe {
        clingo_control_assign_external(
            self.ctl.as_ptr(),
            symbol.0,
            value as clingo_truth_value_t,
        )
    };
    if success {
        Ok(())
    } else {
        Err(error_message())
    }
}
/// Release an external atom.
///
/// After this call, an external atom is no longer external and subject to
/// program simplifications. If the atom does not exist or is not external,
/// this is a noop.
///
/// # Arguments
///
/// * `atom` - atom to release
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
// pub fn clingo_control_release_external(control: *mut Control,
// atom: clingo_symbol_t)
// -> u8;
/// Register a custom propagator with the control object.
///
/// If the sequential flag is set to true, the propagator is called
/// sequentially when solving with multiple threads.
///
/// See the [`Propagator`](struct.Propagator) module for more information.
///
/// # Arguments
///
/// * `_propagator_builder` - builder whose type determines the propagator (the value itself is unused)
/// * `data` - user data passed to the propagator functions
/// * `sequential` - whether the propagator should be called sequentially
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn register_propagator<D, T: PropagatorBuilder<D>>(
    &mut self,
    _propagator_builder: &T,
    data: &mut D,
    sequential: bool,
) -> Result<(), &'static str> {
    // NOTE(review): `propagator` is a stack local whose address is handed to the
    // C side; this is only sound if clingo copies the propagator (its table of
    // function pointers) during registration instead of retaining the pointer -
    // TODO confirm against the clingo_control_register_propagator contract.
    let propagator = T::new();
    let propagator_ptr: *const Propagator = &propagator;
    let data_ptr = data as *mut D;
    if unsafe {
        clingo_control_register_propagator(
            self.ctl.as_ptr(),
            // Reinterprets the wrapper as the raw bindgen struct (newtype layout assumed).
            propagator_ptr as *const clingo_propagator,
            data_ptr as *mut ::std::os::raw::c_void,
            sequential,
        )
    } {
        Ok(())
    } else {
        Err(error_message())
    }
}
/// Get a statistics object to inspect solver statistics.
///
/// Statistics are updated after a solve call.
///
/// See the [`Statistics`](struct.Statistics.html) module for more information.
///
/// **Attention:**
/// The level of detail of the statistics depends on the stats option
/// (which can be set using the [`Configuration`](struct.Configuration.html) module or passed as an
/// option when [`new()`](struct.Control.html#method.new) creating the control object).
/// The default level zero only provides basic statistics,
/// level one provides extended and accumulated statistics,
/// and level two provides per-thread statistics.
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn statistics(&mut self) -> Result<&mut Statistics, &'static str> {
    let mut stat: *mut clingo_statistics_t = std::ptr::null_mut();
    if !unsafe { clingo_control_statistics(self.ctl.as_ptr(), &mut stat) } {
        return Err(error_message());
    }
    unsafe { (stat as *mut Statistics).as_mut() }
        .ok_or("Rust bindings failed to dereference pointer to clingo statistics")
}
/// Interrupt the active solve call (or the following solve call right at the beginning).
pub fn interrupt(&mut self) {
    // Fire-and-forget: the C call reports no result.
    unsafe { clingo_control_interrupt(self.ctl.as_ptr()) };
}
/// Get a configuration object to change the solver configuration.
///
/// See the [`Configuration`](struct.Configuration.html) module for more information.
pub fn configuration(&mut self) -> Option<&mut Configuration> {
    let mut conf: *mut clingo_configuration_t = std::ptr::null_mut();
    match unsafe { clingo_control_configuration(self.ctl.as_ptr(), &mut conf) } {
        true => unsafe { (conf as *mut Configuration).as_mut() },
        false => None,
    }
}
/// Configure how learnt constraints are handled during enumeration.
///
/// If the enumeration assumption is enabled, then all information learnt from
/// the solver`s various enumeration modes is removed after a solve call. This
/// includes enumeration of cautious or brave consequences, enumeration of
/// answer sets with or without projection, or finding optimal models, as well
/// as clauses added with clingo_solve_control_add_clause().
///
/// **Attention:** For practical purposes, this option is only interesting for single-shot solving
/// or before the last solve call to squeeze out a tiny bit of performance.
/// Initially, the enumeration assumption is enabled.
///
/// # Arguments
///
/// * `enable` - whether to enable the assumption
pub fn use_enumeration_assumption(&mut self, enable: bool) -> Option<()> {
    match unsafe { clingo_control_use_enumeration_assumption(self.ctl.as_ptr(), enable) } {
        true => Some(()),
        false => None,
    }
}
/// Return the symbol for a constant definition of form: <tt>\#const name = symbol</tt>.
///
/// # Arguments
///
/// * `name` - the name of the constant
pub fn get_const(&mut self, name: &str) -> Option<Symbol> {
    let name_c = CString::new(name).unwrap();
    let mut sym: clingo_symbol_t = 0;
    let found =
        unsafe { clingo_control_get_const(self.ctl.as_ptr(), name_c.as_ptr(), &mut sym) };
    if found {
        Some(Symbol(sym))
    } else {
        None
    }
}
/// Check if there is a constant definition for the given constant.
///
/// # Arguments
///
/// * `name` - the name of the constant
///
/// # Errors
///
/// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if constant definition does not exist
///
/// **See:** [`Part::get_const()`](struct.Part.html#method.get_const)
pub fn has_const(&mut self, name: &str) -> Result<bool, &'static str> {
    let name_c = CString::new(name).unwrap();
    let mut exists = false;
    match unsafe { clingo_control_has_const(self.ctl.as_ptr(), name_c.as_ptr(), &mut exists) } {
        true => Ok(exists),
        false => Err(error_message()),
    }
}
/// Get an object to inspect symbolic atoms (the relevant Herbrand base) used
/// for grounding.
///
/// See the [`SymbolicAtoms`](struct.SymbolicAtoms.html) module for more information.
pub fn symbolic_atoms(&mut self) -> Option<&mut SymbolicAtoms> {
    let mut atoms: *mut clingo_symbolic_atoms_t = std::ptr::null_mut();
    match unsafe { clingo_control_symbolic_atoms(self.ctl.as_ptr(), &mut atoms) } {
        true => unsafe { (atoms as *mut SymbolicAtoms).as_mut() },
        false => None,
    }
}
/// Get an object to inspect theory atoms that occur in the grounding.
///
/// See the [`TheoryAtoms`](struct.TheoryAtoms.html) module for more information.
pub fn theory_atoms(&mut self) -> Option<&mut TheoryAtoms> {
    let mut atoms: *mut clingo_theory_atoms_t = std::ptr::null_mut();
    match unsafe { clingo_control_theory_atoms(self.ctl.as_ptr(), &mut atoms) } {
        true => unsafe { (atoms as *mut TheoryAtoms).as_mut() },
        false => None,
    }
}
/// Get an object to add ground directives to the program.
///
/// See the [`ProgramBuilder`](struct.ProgramBuilder.html) module for more information.
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn backend(&mut self) -> Option<&mut Backend> {
    let mut backend = std::ptr::null_mut();
    match unsafe { clingo_control_backend(self.ctl.as_ptr(), &mut backend) } {
        true => unsafe { (backend as *mut Backend).as_mut() },
        false => None,
    }
}
/// Get an object to add non-ground directives to the program.
///
/// See the [`ProgramBuilder`](struct.ProgramBuilder.html) module for more information.
pub fn program_builder(&mut self) -> Option<&mut ProgramBuilder> {
    let mut builder: *mut clingo_program_builder_t = std::ptr::null_mut();
    match unsafe { clingo_control_program_builder(self.ctl.as_ptr(), &mut builder) } {
        true => unsafe { (builder as *mut ProgramBuilder).as_mut() },
        false => None,
    }
}
}
/// Handle for incrementally building a (non-ground) logic program.
pub struct ProgramBuilder(clingo_program_builder_t);
impl ProgramBuilder {
    /// Begin building a program.
    pub fn begin(&mut self) -> Option<()> {
        match unsafe { clingo_program_builder_begin(&mut self.0) } {
            true => Some(()),
            false => None,
        }
    }
    /// Adds a statement to the program.
    ///
    /// **Attention:** [`begin()`](struct.ProgramBuilder.html#method.begin) must be called before
    /// adding statements and [`end()`](struct.ProgramBuilder.html#method.end) must be called after
    /// all statements have been added.
    ///
    /// # Arguments
    ///
    /// * `statement` - the statement to add
    ///
    /// # Errors
    ///
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) for statements of invalid form
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    pub fn add(&mut self, statement: &AstStatement) -> Result<(), &'static str> {
        let raw_stm = &statement.0;
        match unsafe { clingo_program_builder_add(&mut self.0, raw_stm) } {
            true => Ok(()),
            false => Err(error_message()),
        }
    }
    /// End building a program.
    pub fn end(&mut self) -> Option<()> {
        match unsafe { clingo_program_builder_end(&mut self.0) } {
            true => Some(()),
            false => None,
        }
    }
}
/// Wrapper around the raw clingo AST head literal.
#[derive(Clone, Copy)]
pub struct AstHeadLiteral(clingo_ast_head_literal_t);
/// Wrapper around the raw clingo AST body literal.
#[derive(Clone, Copy)]
pub struct AstBodyLiteral(clingo_ast_body_literal_t);
impl AstBodyLiteral {
    /// Create a body literal from a location, sign, body-literal type and literal.
    ///
    /// NOTE(review): the returned value stores a raw pointer to `lit_ref` without a
    /// lifetime tying it to the borrow - the caller must keep the `AstLiteral` alive
    /// for as long as the body literal is used; verify at call sites.
    pub fn new(
        Location(location): Location,
        sign: AstSign,
        type_: AstBodyLiteralType,
        lit_ref: &AstLiteral,
    ) -> AstBodyLiteral {
        // Union member selection: `literal` carries the payload pointer.
        let _bg_union_2 = clingo_ast_body_literal__bindgen_ty_1 {
            literal: (lit_ref as *const AstLiteral) as *const clingo_ast_literal,
        };
        AstBodyLiteral(clingo_ast_body_literal_t {
            location: location,
            sign: sign as clingo_ast_sign_t,
            type_: type_ as clingo_ast_body_literal_type_t,
            __bindgen_anon_1: _bg_union_2,
        })
    }
}
/// Wrapper around the raw clingo AST rule.
#[derive(Clone, Copy)]
pub struct AstRule(clingo_ast_rule_t);
impl AstRule {
    /// Build a rule from a head literal and a slice of body literals.
    pub fn new(AstHeadLiteral(head): AstHeadLiteral, body: &[AstBodyLiteral]) -> AstRule {
        AstRule(clingo_ast_rule {
            head: head,
            body: body.as_ptr() as *const clingo_ast_body_literal_t,
            size: body.len(),
        })
    }
    /// The head literal of the rule.
    pub fn head(&self) -> AstHeadLiteral {
        AstHeadLiteral(self.0.head)
    }
    /// The body literals of the rule.
    pub fn body(&self) -> &[AstBodyLiteral] {
        unsafe { std::slice::from_raw_parts(self.0.body as *const AstBodyLiteral, self.0.size) }
    }
    /// The number of body literals.
    pub fn size(&self) -> usize {
        self.0.size
    }
}
/// Wrapper around the raw clingo AST external directive.
#[derive(Clone, Copy)]
pub struct AstExternal(clingo_ast_external_t);
impl AstExternal {
    /// Create an external directive from an atom term and its body literals.
    ///
    /// NOTE(review): stores a raw pointer into `body` without a lifetime tying it
    /// to the borrow - the caller must keep the slice alive while the value is
    /// used; verify at call sites.
    pub fn new(AstTerm(term): AstTerm, body: &[AstBodyLiteral]) -> AstExternal {
        let ext = clingo_ast_external {
            atom: term,
            body: body.as_ptr() as *const clingo_ast_body_literal_t,
            size: body.len(),
        };
        AstExternal(ext)
    }
}
/// Wrapper around the raw clingo AST statement.
///
/// NOTE(review): the `new_*` constructors store raw pointers to their borrowed
/// payloads (`&AstExternal` / `&AstRule`) without a lifetime parameter tying the
/// statement to the borrow - callers must keep the payload alive while the
/// statement is in use; verify at call sites.
#[derive(Clone)]
pub struct AstStatement(clingo_ast_statement_t);
impl AstStatement {
    /// Create an external statement from a location, a statement type and an
    /// external payload.
    pub fn new_external(
        Location(location): Location,
        type_: AstStatementType,
        ext: &AstExternal,
    ) -> AstStatement {
        let external: *const AstExternal = ext;
        // Union member selection: `external` carries the payload pointer.
        let _bg_union_2 = clingo_ast_statement__bindgen_ty_1 {
            external: external as *const clingo_ast_external,
        };
        let stm = clingo_ast_statement_t {
            location: location,
            type_: type_ as clingo_ast_statement_type_t,
            __bindgen_anon_1: _bg_union_2,
        };
        AstStatement(stm)
    }
    /// Create a rule statement from a location and a rule payload.
    pub fn new_rule(Location(location): Location, rule_: &AstRule) -> AstStatement {
        let rule: *const AstRule = rule_;
        // Union member selection: `rule` carries the payload pointer.
        let _bg_union_2 = clingo_ast_statement__bindgen_ty_1 {
            rule: rule as *const clingo_ast_rule,
        };
        let stm = clingo_ast_statement_t {
            location: location,
            // The statement type is fixed to `Rule` for this constructor.
            type_: AstStatementType::Rule as clingo_ast_statement_type_t,
            __bindgen_anon_1: _bg_union_2,
        };
        AstStatement(stm)
    }
    /// The source location of the statement.
    pub fn location(&self) -> Location {
        Location(self.0.location)
    }
    /// Decode the raw statement type into the safe enum.
    ///
    /// # Errors
    ///
    /// Returns an error when the raw value matches no known statement type.
    pub fn get_type(&self) -> Result<AstStatementType, &'static str> {
        let AstStatement(ref stm) = *self;
        match stm.type_ as u32 {
            clingo_ast_statement_type_clingo_ast_statement_type_rule => Ok(AstStatementType::Rule),
            clingo_ast_statement_type_clingo_ast_statement_type_const => {
                Ok(AstStatementType::Const)
            }
            clingo_ast_statement_type_clingo_ast_statement_type_show_signature => {
                Ok(AstStatementType::ShowSignature)
            }
            clingo_ast_statement_type_clingo_ast_statement_type_show_term => {
                Ok(AstStatementType::ShowTerm)
            }
            clingo_ast_statement_type_clingo_ast_statement_type_minimize => {
                Ok(AstStatementType::Minimize)
            }
            clingo_ast_statement_type_clingo_ast_statement_type_script => {
                Ok(AstStatementType::Script)
            }
            clingo_ast_statement_type_clingo_ast_statement_type_program => {
                Ok(AstStatementType::Program)
            }
            clingo_ast_statement_type_clingo_ast_statement_type_external => {
                Ok(AstStatementType::External)
            }
            clingo_ast_statement_type_clingo_ast_statement_type_edge => Ok(AstStatementType::Edge),
            clingo_ast_statement_type_clingo_ast_statement_type_heuristic => {
                Ok(AstStatementType::Heuristic)
            }
            clingo_ast_statement_type_clingo_ast_statement_type_project_atom => {
                Ok(AstStatementType::ProjectAtom)
            }
            clingo_ast_statement_type_clingo_ast_statement_type_project_atom_signature => {
                Ok(AstStatementType::ProjectAtomSignature)
            }
            clingo_ast_statement_type_clingo_ast_statement_type_theory_definition => {
                Ok(AstStatementType::TheoryDefinition)
            }
            _ => Err("Rust binding failed to match clingo ast statement type"),
        }
    }
    /// Access the statement payload as a rule.
    ///
    /// # Safety
    ///
    /// The caller must ensure the statement was actually constructed as a rule
    /// (e.g. via `new_rule`); otherwise the union is read through the wrong member.
    pub unsafe fn rule(&self) -> &AstRule {
        let AstStatement(ref stm) = *self;
        let ast_rule_ptr = stm.__bindgen_anon_1.rule as *const clingo_ast_rule_t;
        (ast_rule_ptr as *const AstRule).as_ref().unwrap()
    }
}
/// Wrapper around the raw clingo AST term.
#[derive(Clone, Copy)]
pub struct AstTerm(clingo_ast_term_t);
impl AstTerm {
    /// Create a term holding a symbol at the given source location.
    pub fn new_symbol(Location(location): Location, Symbol(symbol): Symbol) -> AstTerm {
        let term_union = clingo_ast_term__bindgen_ty_1 { symbol: symbol };
        AstTerm(clingo_ast_term_t {
            location: location,
            type_: clingo_ast_term_type_clingo_ast_term_type_symbol as clingo_ast_term_type_t,
            __bindgen_anon_1: term_union,
        })
    }
    /// The source location of the term.
    pub fn location(&self) -> Location {
        Location(self.0.location)
    }
}
/// Wrapper around the raw clingo AST literal.
#[derive(Clone, Copy)]
pub struct AstLiteral(clingo_ast_literal_t);
impl AstLiteral {
    /// Create a literal from a location, sign, literal type and symbolic term.
    ///
    /// NOTE(review): the returned value stores a raw pointer to `sym` without a
    /// lifetime tying it to the borrow - the caller must keep the `AstTerm` alive
    /// for as long as the literal is used; verify at call sites.
    pub fn new(
        Location(location): Location,
        sign: AstSign,
        type_: AstLiteralType,
        sym: &AstTerm,
    ) -> AstLiteral {
        let symbol: *const AstTerm = sym;
        // Union member selection: `symbol` carries the term pointer.
        let _bg_union_2 = clingo_ast_literal__bindgen_ty_1 {
            symbol: symbol as *const clingo_sys::clingo_ast_term,
        };
        let lit = clingo_ast_literal_t {
            location: location,
            type_: type_ as clingo_ast_literal_type_t,
            sign: sign as clingo_ast_sign_t,
            __bindgen_anon_1: _bg_union_2,
        };
        AstLiteral(lit)
    }
}
/// Handle to inspect and modify the solver configuration tree.
pub struct Configuration(clingo_configuration_t);
impl Configuration {
/// Get the root key of the configuration.
pub fn root(&mut self) -> Option<Id> {
    let mut root_key: clingo_id_t = 0;
    match unsafe { clingo_configuration_root(&mut self.0, &mut root_key) } {
        true => Some(Id(root_key)),
        false => None,
    }
}
/// Get the type of a key.
// TODO: The type is bitset, an entry can have multiple (but at least one) type.
pub fn configuration_type(&mut self, Id(key): Id) -> Result<ConfigurationType, &'static str> {
    let mut ctype: clingo_configuration_type_bitset_t = 0;
    if !unsafe { clingo_configuration_type(&mut self.0, key, &mut ctype) } {
        return Err("Rust binding failed to detect clingo configuration type");
    }
    match ctype as u32 {
        clingo_configuration_type_clingo_configuration_type_value => {
            Ok(ConfigurationType::Value)
        }
        clingo_configuration_type_clingo_configuration_type_array => {
            Ok(ConfigurationType::Array)
        }
        clingo_configuration_type_clingo_configurations_type_map => Ok(ConfigurationType::Map),
        _ => Err("Rust binding failed to match clingo configuration type"),
    }
}
/// Get the description of an entry.
pub fn description(&mut self, Id(key): Id) -> Option<&str> {
    // Initialize with a null pointer instead of `mem::uninitialized()`, which is
    // deprecated and undefined behavior for raw pointers; clingo overwrites the
    // out parameter on success.
    let mut description_ptr: *const c_char = std::ptr::null();
    if unsafe {
        clingo_configuration_description(
            &mut self.0,
            key,
            &mut description_ptr as *mut *const c_char,
        )
    } {
        // On success the pointer refers to a clingo-owned NUL-terminated string.
        let cstr = unsafe { CStr::from_ptr(description_ptr) };
        Some(cstr.to_str().unwrap())
    } else {
        None
    }
}
/// Get the size of an array entry.
///
/// # Pre-condition
///
/// The [`configuration_type()`](struct.Configuration.html#method.configuration_type) type of the entry must be [`ConfigurationType::Array`](enum.ConfigurationType.html#variant.Array).
pub fn array_size(&mut self, Id(key): Id) -> Option<usize> {
    let mut size = 0;
    match unsafe { clingo_configuration_array_size(&mut self.0, key, &mut size) } {
        true => Some(size),
        false => None,
    }
}
/// Get the subkey at the given offset of an array entry.
///
/// **Note:** Some array entries, like for example the solver configuration, can be accessed past their actual size to add subentries.
/// # Pre-condition
///
/// The [`configuration_type()`](struct.Configuration.html#method.configuration_type) type of the entry must be [`ConfigurationType::Array`](enum.ConfigurationType.html#variant.Array).
///
/// # Arguments
///
/// * `key` - the key
/// * `offset` - the offset in the array
pub fn array_at(&mut self, Id(key): Id, offset: usize) -> Option<Id> {
    let mut subkey: clingo_id_t = 0;
    match unsafe { clingo_configuration_array_at(&mut self.0, key, offset, &mut subkey) } {
        true => Some(Id(subkey)),
        false => None,
    }
}
/// Get the number of subkeys of a map entry.
///
/// # Pre-condition
///
/// The [`configuration_type()`](struct.Configuration.html#method.configuration_type) type of the entry must be [`ConfigurationType::Map`](enum.ConfigurationType.html#variant.Map).
pub fn map_size(&mut self, Id(key): Id) -> Option<usize> {
    let mut size = 0;
    match unsafe { clingo_configuration_map_size(&mut self.0, key, &mut size) } {
        true => Some(size),
        false => None,
    }
}
/// Get the name associated with the offset-th subkey.
///
/// # Pre-condition
///
/// The [`configuration_type()`](struct.Configuration.html#method.configuration_type) type of the entry must be [`ConfigurationType::Map`](enum.ConfigurationType.html#variant.Map).
///
/// # Arguments:
///
/// * `key` - the key
/// * `offset` - the offset of the name
pub fn map_subkey_name(&mut self, Id(key): Id, offset: usize) -> Option<&str> {
    // Initialize with a null pointer instead of `mem::uninitialized()`, which is
    // deprecated and undefined behavior for raw pointers; clingo overwrites the
    // out parameter on success.
    let mut name_ptr: *const c_char = std::ptr::null();
    if unsafe {
        clingo_configuration_map_subkey_name(
            &mut self.0,
            key,
            offset,
            &mut name_ptr as *mut *const c_char,
        )
    } {
        // On success the pointer refers to a clingo-owned NUL-terminated string.
        let cstr = unsafe { CStr::from_ptr(name_ptr) };
        Some(cstr.to_str().unwrap())
    } else {
        None
    }
}
/// Lookup a subkey under the given name.
///
/// # Pre-condition
///
/// The [`configuration_type()`](struct.Configuration.html#method.configuration_type) type of the entry must be [`ConfigurationType::Map`](enum.ConfigurationType.html#variant.Map).
///
/// **Note:** Multiple levels can be looked up by concatenating keys with a period.
pub fn map_at(&mut self, Id(key): Id, name: &str) -> Option<Id> {
    let name_c = CString::new(name).unwrap();
    let mut subkey: clingo_id_t = 0;
    let found =
        unsafe { clingo_configuration_map_at(&mut self.0, key, name_c.as_ptr(), &mut subkey) };
    if found {
        Some(Id(subkey))
    } else {
        None
    }
}
/// Check whether an entry has a value.
///
/// # Pre-condition
///
/// The [`configuration_type()`](struct.Configuration.html#method.configuration_type) type of the entry must be [`ConfigurationType::Value`](enum.ConfigurationType.html#variant.Value).
///
/// # Arguments:
///
/// * `key` - the key
pub fn value_is_assigned(&mut self, Id(key): Id) -> Option<bool> {
    let mut assigned = false;
    match unsafe { clingo_configuration_value_is_assigned(&mut self.0, key, &mut assigned) } {
        true => Some(assigned),
        false => None,
    }
}
//NOTTODO obsolete: clingo_configuration_value_get_size(&mut self.0, key, &mut size) }
/// Get the string value of the given entry.
///
/// # Pre-condition
///
/// - The [`configuration_type()`](struct.Configuration.html#method.configuration_type) type of the entry must be [`ConfigurationType::Value`](enum.ConfigurationType.html#variant.Value).
///
/// # Arguments:
///
/// * `key` - the key
pub fn value_get(&mut self, Id(key): Id) -> Option<&str> {
let mut size = 0;
if unsafe { clingo_configuration_value_get_size(&mut self.0, key, &mut size) } {
let mut value_ptr = unsafe { mem::uninitialized() };
if unsafe { clingo_configuration_value_get(&mut self.0, key, &mut value_ptr, size) } {
let cstr = unsafe { CStr::from_ptr(&value_ptr) };
Some(cstr.to_str().unwrap())
} else {
None
}
} else {
None
}
}
/// Set the value of an entry.
///
/// # Pre-condition
///
/// The [`configuration_type()`](struct.Configuration.html#method.configuration_type) type of the entry must be [`ConfigurationType::Value`](enum.ConfigurationType.html#variant.Value).
///
/// # Arguments
///
/// * `key` - the key
/// * `value` - the value to set
pub fn value_set(&mut self, Id(key): Id, value: &str) -> Option<()> {
    let value_c = CString::new(value).unwrap();
    match unsafe { clingo_configuration_value_set(&mut self.0, key, value_c.as_ptr()) } {
        true => Some(()),
        false => None,
    }
}
}
/// Handle to add ground directives (rules, weight rules, minimize constraints,
/// projections, assumptions) directly to the underlying program.
pub struct Backend(clingo_backend_t);
impl Backend {
/// Add a rule to the program.
///
/// # Arguments
///
/// * `choice` determines if the head is a choice or a disjunction
/// * `head` - the head atoms
/// * `body` - the body literals
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn rule(&mut self, choice: bool, head: &[Atom], body: &[Literal]) -> Result<(), Error> {
if unsafe {
clingo_backend_rule(
&mut self.0,
choice,
head.as_ptr() as *const clingo_atom_t,
head.len(),
body.as_ptr() as *const clingo_literal_t,
body.len(),
)
} {
Ok(())
} else {
Err(error())
}
}
/// Add a weight rule to the program.
///
/// **Attention:** All weights and the lower bound must be positive.
///
/// # Arguments
/// * `choice` - determines if the head is a choice or a disjunction
/// * `head` - the head atoms
/// * `lower_bound` - the lower bound of the weight rule
/// * `body` - the weighted body literals
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn weight_rule(
&mut self,
choice: bool,
head: &[Atom],
lower_bound: i32,
body: &[WeightedLiteral],
) -> Result<(), Error> {
if unsafe {
clingo_backend_weight_rule(
&mut self.0,
choice,
head.as_ptr() as *const clingo_atom_t,
head.len(),
lower_bound,
body.as_ptr() as *const clingo_weighted_literal_t,
body.len(),
)
} {
Ok(())
} else {
Err(error())
}
}
/// Add a minimize constraint (or weak constraint) to the program.
///
/// # Arguments:
///
/// * `priority` - the priority of the constraint
/// * `literals` - the weighted literals whose sum to minimize
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn minimize(&mut self, priority: i32, literals: &[WeightedLiteral]) -> Result<(), Error> {
if unsafe {
clingo_backend_minimize(
&mut self.0,
priority,
literals.as_ptr() as *const clingo_weighted_literal_t,
literals.len(),
)
} {
Ok(())
} else {
Err(error())
}
}
/// Add a projection directive.
///
/// # Arguments:
///
/// * `atoms` - the atoms to project on
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn project(&mut self, atoms: &[Atom]) -> Result<(), Error> {
if unsafe {
clingo_backend_project(
&mut self.0,
atoms.as_ptr() as *const clingo_atom_t,
atoms.len(),
)
} {
Ok(())
} else {
Err(error())
}
}
//TODO pub fn clingo_backend_external(backend: *mut Backend,
// atom: clingo_atom_t,
// type_: clingo_external_type_t)
// -> u8;
/// Add an assumption directive.
///
/// # Arguments
///
/// * `literals` - the literals to assume (positive literals are true and negative literals
/// false for the next solve call)
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn assume(&mut self, literals: &[Literal]) -> Result<(), &'static str> {
    let success = unsafe {
        clingo_backend_assume(
            &mut self.0,
            literals.as_ptr() as *const clingo_literal_t,
            literals.len(),
        )
    };
    match success {
        true => Ok(()),
        false => Err(error_message()),
    }
}
//TODO pub fn clingo_backend_heuristic(backend: *mut Backend,
// atom: clingo_atom_t,
// type_: clingo_heuristic_type_t,
// bias: c_int,
// priority: ::std::os::raw::c_uint,
// condition: *const clingo_literal_t,
// size: size_t)
// -> u8;
//TODO pub fn clingo_backend_acyc_edge(backend: *mut Backend,
// node_u: c_int,
// node_v: c_int,
// condition: *const clingo_literal_t,
// size: size_t)
// -> u8;
/// Get a fresh atom to be used in aspif directives.
pub fn add_atom(&mut self) -> Option<Atom> {
    let mut atom: clingo_atom_t = 0;
    match unsafe { clingo_backend_add_atom(&mut self.0, &mut atom) } {
        true => Some(Atom(atom)),
        false => None,
    }
}
}
/// Handle to inspect the solver statistics tree.
pub struct Statistics(clingo_statistics_t);
impl Statistics {
/// Get the root key of the statistics.
pub fn root(&mut self) -> Option<u64> {
    let mut root_key: u64 = 0;
    match unsafe { clingo_statistics_root(&mut self.0, &mut root_key) } {
        true => Some(root_key),
        false => None,
    }
}
/// Get the type of a key.
///
/// # Errors
///
/// - Failure to match clingo statistics type
pub fn statistics_type(&mut self, key: u64) -> Result<StatisticsType, &'static str> {
    let mut stype: clingo_statistics_type_t = 0;
    if !unsafe { clingo_statistics_type(&mut self.0, key, &mut stype) } {
        return Err(error_message());
    }
    match stype as u32 {
        clingo_statistics_type_clingo_statistics_type_empty => Ok(StatisticsType::Empty),
        clingo_statistics_type_clingo_statistics_type_value => Ok(StatisticsType::Value),
        clingo_statistics_type_clingo_statistics_type_array => Ok(StatisticsType::Array),
        clingo_statistics_type_clingo_statistics_type_map => Ok(StatisticsType::Map),
        _ => Err("Rust binding failed to match clingo statistics type"),
    }
}
/// Get the size of an array entry.
///
/// # Pre-condition
///
/// The [statistics type](struct.Statistics.html#method.statistics_type) of the entry must be
/// [`StatisticsType::Array`](enum.StatisticsType.html#variant.Array).
pub fn array_size(&mut self, key: u64) -> Option<usize> {
    let mut size: usize = 0;
    match unsafe { clingo_statistics_array_size(&mut self.0, key, &mut size) } {
        true => Some(size),
        false => None,
    }
}
/// Get the subkey at the given offset of an array entry.
///
/// # Pre-condition
///
/// The [statistics type](struct.Statistics.html#method.statistics_type) of the entry must be
/// [`StatisticsType::Array`](enum.StatisticsType.html#variant.Array).
///
/// # Arguments
///
/// * `key` - the key
/// * `offset` - the offset in the array
pub fn statistics_array_at(&mut self, key: u64, offset: usize) -> Option<u64> {
    let mut subkey: u64 = 0;
    match unsafe { clingo_statistics_array_at(&mut self.0, key, offset, &mut subkey) } {
        true => Some(subkey),
        false => None,
    }
}
/// Get the number of subkeys of a map entry.
///
/// # Pre-condition
///
/// The [statistics type](struct.Statistics.html#method.statistics_type) of the entry must
/// be [`StatisticsType::Map`](enum.StatisticsType.html#variant.Map).
pub fn map_size(&mut self, key: u64) -> Option<usize> {
    let mut size: usize = 0;
    match unsafe { clingo_statistics_map_size(&mut self.0, key, &mut size) } {
        true => Some(size),
        false => None,
    }
}
/// Get the name associated with the offset-th subkey.
///
/// # Pre-condition
///
/// The [statistics type](struct.Statistics.html#method.statistics_type) of the entry must be
/// [`StatisticsType::Map`](enum.StatisticsType.html#variant.Map).
///
/// # Arguments
///
/// * `key` - the key
/// * `offset` - the offset of the name
///
/// NOTE(review): the returned `&'a str` has an unconstrained lifetime parameter,
/// so it is not tied to `self`; the string points into clingo-owned memory and
/// must not outlive the statistics object - consider tying the lifetime to the
/// `self` borrow in a future API revision.
pub fn map_subkey_name<'a>(&mut self, key: u64, offset: usize) -> Option<&'a str> {
    let mut name = std::ptr::null() as *const c_char;
    if unsafe { clingo_statistics_map_subkey_name(&mut self.0, key, offset, &mut name) } {
        Some(unsafe { CStr::from_ptr(name) }.to_str().unwrap())
    } else {
        None
    }
}
/// Lookup a subkey under the given name.
///
/// # Pre-condition
///
/// The [statistics type](struct.Statistics.html#method.statistics_type) of the entry must be
/// [`StatisticsType::Map`](enum.StatisticsType.html#variant.Map).
///
/// **Note:** Multiple levels can be looked up by concatenating keys with a period.
///
/// # Arguments
///
/// * `key` - the key
/// * `name` - the name to lookup the subkey
pub fn map_at(&mut self, key: u64, name: &str) -> Option<u64> {
    let name_c = CString::new(name).unwrap();
    let mut subkey: u64 = 0;
    let found =
        unsafe { clingo_statistics_map_at(&mut self.0, key, name_c.as_ptr(), &mut subkey) };
    if found {
        Some(subkey)
    } else {
        None
    }
}
/// Get the value of the given entry.
///
/// # Pre-condition
///
/// The [statistics type](struct.Statistics.html#method.statistics_type) of the entry must be
/// [`StatisticsType::Value`](enum.StatisticsType.html#variant.Value).
pub fn value_get(&mut self, key: u64) -> Option<f64> {
    let mut value: f64 = 0.0;
    match unsafe { clingo_statistics_value_get(&mut self.0, key, &mut value) } {
        true => Some(value),
        false => None,
    }
}
}
/// Wrapper around a clingo signature (name / arity / sign triple).
pub struct Signature(clingo_signature_t);
impl Signature {
    /// Create a new signature.
    ///
    /// # Arguments
    ///
    /// * `name_` - name of the signature
    /// * `arity` - arity of the signature
    /// * `positive` - false if the signature has a classical negation sign
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    pub fn create(name_: &str, arity: u32, positive: bool) -> Result<Signature, &'static str> {
        let name_c = CString::new(name_).unwrap();
        let mut signature = 0;
        let created = unsafe {
            clingo_signature_create(name_c.as_ptr(), arity, positive, &mut signature)
        };
        match created {
            true => Ok(Signature(signature)),
            false => Err(error_message()),
        }
    }
}
/// Handle to inspect the symbolic atoms (the relevant Herbrand base) of a program.
#[derive(Debug)]
pub struct SymbolicAtoms(clingo_symbolic_atoms_t);
impl SymbolicAtoms {
/// Get a forward iterator to the beginning of the sequence of all symbolic
/// atoms optionally restricted to a given signature.
///
/// # Arguments
///
/// * `signature` optional signature
pub fn begin(
&mut self,
opt_sig: Option<&Signature>,
) -> Option<clingo_symbolic_atom_iterator_t> {
match opt_sig {
Some(sig) => {
let mut iterator = 0 as clingo_symbolic_atom_iterator_t;
if unsafe { clingo_symbolic_atoms_begin(&mut self.0, &sig.0, &mut iterator) } {
Some(iterator)
} else {
None
}
}
None => {
let signature = std::ptr::null();
let mut iterator = 0 as clingo_symbolic_atom_iterator_t;
if unsafe { clingo_symbolic_atoms_begin(&mut self.0, signature, &mut iterator) } {
Some(iterator)
} else {
None
}
}
}
}
/// Iterator pointing to the end of the sequence of symbolic atoms.
pub fn end(&mut self) -> Option<clingo_symbolic_atom_iterator_t> {
let mut iterator = 0 as clingo_symbolic_atom_iterator_t;
if unsafe { clingo_symbolic_atoms_end(&mut self.0, &mut iterator) } {
Some(iterator)
} else {
None
}
}
/// Find a symbolic atom given its symbolic representation.
///
/// # Arguments
///
/// * `symbol` - the symbol to lookup
/// * `iterator` iterator pointing to the symbolic atom or to the end
/// of the sequence if no corresponding atom is found
pub fn find(&mut self, Symbol(symbol): Symbol) -> Option<clingo_symbolic_atom_iterator_t> {
let mut iterator = 0 as clingo_symbolic_atom_iterator_t;
if unsafe { clingo_symbolic_atoms_find(&mut self.0, symbol, &mut iterator) } {
Some(iterator)
} else {
None
}
}
/// Check if two iterators point to the same element (or end of the sequence).
///
/// # Arguments
///
/// * `a` - the first iterator
/// * `b` - the second iterator
pub fn iterator_is_equal_to(
&mut self,
a: clingo_symbolic_atom_iterator_t,
b: clingo_symbolic_atom_iterator_t,
) -> Option<bool> {
let mut equal = false;
if unsafe { clingo_symbolic_atoms_iterator_is_equal_to(&mut self.0, a, b, &mut equal) } {
Some(equal)
} else {
None
}
}
/// Get the symbolic representation of an atom.
///
/// # Arguments
///
/// * `iterator` iterator to the atom
pub fn symbol(&mut self, iterator: clingo_symbolic_atom_iterator_t) -> Option<Symbol> {
let mut symbol = 0 as clingo_symbol_t;
if unsafe { clingo_symbolic_atoms_symbol(&mut self.0, iterator, &mut symbol) } {
Some(Symbol(symbol))
} else {
None
}
}
/// Check whether an atom is a fact.
///
/// **Note:** This does not determine if an atom is a cautious consequence. The
/// grounding or solving component`s simplifications can only detect this in
/// some cases.
///
/// # Arguments
///
/// * `iterator` iterator to the atom
pub fn is_fact(&mut self, iterator: clingo_symbolic_atom_iterator_t) -> Option<bool> {
let mut fact = false;
if unsafe { clingo_symbolic_atoms_is_fact(&mut self.0, iterator, &mut fact) } {
Some(fact)
} else {
None
}
}
/// Check whether an atom is external.
///
/// An atom is external if it has been defined using an external directive and
/// has not been released or defined by a rule.
///
/// # Arguments
///
/// * `iterator` iterator to the atom
pub fn is_external(&mut self, iterator: clingo_symbolic_atom_iterator_t) -> Option<bool> {
let mut external = false;
if unsafe { clingo_symbolic_atoms_is_external(&mut self.0, iterator, &mut external) } {
Some(external)
} else {
None
}
}
/// Returns the (numeric) aspif literal corresponding to the given symbolic atom.
///
/// Such a literal can be mapped to a solver literal (see the \ref Propagator
/// module) or be used in rules in aspif format (see the \ref ProgramBuilder
/// module).
///
/// # Arguments
///
/// * `iterator` iterator to the atom
pub fn literal(&mut self, iterator: clingo_symbolic_atom_iterator_t) -> Option<Literal> {
let mut literal = 0 as clingo_literal_t;
if unsafe { clingo_symbolic_atoms_literal(&mut self.0, iterator, &mut literal) } {
Some(Literal(literal))
} else {
None
}
}
//TODO pub fn clingo_symbolic_atoms_signatures_size(atoms: *mut SymbolicAtoms,
// size: *mut size_t)
// -> u8;
//TODO pub fn clingo_symbolic_atoms_signatures(atoms: *mut SymbolicAtoms,
// signatures: *mut clingo_signature_t,
// size: size_t)
// -> u8;
/// Get an iterator to the next element in the sequence of symbolic atoms.
///
/// # Arguments
///
/// * `iterator` - the current iterator
pub fn next(
&mut self,
iterator: clingo_symbolic_atom_iterator_t,
) -> Option<clingo_symbolic_atom_iterator_t> {
let mut next = 0 as clingo_symbolic_atom_iterator_t;
if unsafe { clingo_symbolic_atoms_next(&mut self.0, iterator, &mut next) } {
Some(next)
} else {
None
}
}
//TODO pub fn clingo_symbolic_atoms_is_valid(atoms: *mut SymbolicAtoms,
// iterator: clingo_symbolic_atom_iterator_t,
// valid: *mut u8)
// -> u8;
}
/// Inspection interface over the theory atoms of a program.
pub struct TheoryAtoms(clingo_theory_atoms_t);
impl TheoryAtoms {
    //TODO wrap clingo_theory_atoms_term_type()
    //TODO wrap clingo_theory_atoms_term_number()
    /// Get the name of the given constant or function theory term.
    ///
    /// # Pre-condition
    ///
    /// The term must be of type ::clingo_theory_term_type_function or
    /// ::clingo_theory_term_type_symbol.
    ///
    /// # Arguments
    ///
    /// * `term` - id of the term
    pub fn term_name<'a>(&mut self, Id(term): Id) -> Option<&'a str> {
        // NOTE(review): the returned lifetime `'a` is unconstrained (not tied
        // to `self`), so the &str may outlive the underlying C string — confirm
        // that clingo keeps term names alive for the program's lifetime.
        let mut char_ptr = std::ptr::null() as *const c_char;
        if !unsafe { clingo_theory_atoms_term_name(&mut self.0, term, &mut char_ptr) } {
            return None;
        }
        let name = unsafe { CStr::from_ptr(char_ptr) };
        Some(name.to_str().unwrap())
    }
    //TODO wrap clingo_theory_atoms_term_arguments()
    //TODO wrap clingo_theory_atoms_term_to_string_size()
    //TODO wrap clingo_theory_atoms_term_to_string()
    //TODO wrap clingo_theory_atoms_element_tuple()
    //TODO wrap clingo_theory_atoms_element_condition()
    //TODO wrap clingo_theory_atoms_element_condition_id()
    //TODO wrap clingo_theory_atoms_element_to_string_size()
    //TODO wrap clingo_theory_atoms_element_to_string()
    /// Get the total number of theory atoms.
    pub fn size(&mut self) -> Option<usize> {
        let mut count: usize = 0;
        match unsafe { clingo_theory_atoms_size(&mut self.0, &mut count) } {
            true => Some(count),
            false => None,
        }
    }
    /// Get the theory term associated with the theory atom.
    ///
    /// # Arguments
    ///
    /// * `atom` - id of the atom
    pub fn atom_term(&mut self, Id(atom): Id) -> Option<Id> {
        let mut term = 0 as clingo_id_t;
        match unsafe { clingo_theory_atoms_atom_term(&mut self.0, atom, &mut term) } {
            true => Some(Id(term)),
            false => None,
        }
    }
    //TODO wrap clingo_theory_atoms_atom_elements()
    /// Whether the theory atom has a guard.
    ///
    /// # Arguments
    ///
    /// * `atom` - id of the atom
    pub fn atom_has_guard(&mut self, Id(atom): Id) -> Option<bool> {
        let mut has_guard = false;
        match unsafe { clingo_theory_atoms_atom_has_guard(&mut self.0, atom, &mut has_guard) } {
            true => Some(has_guard),
            false => None,
        }
    }
    //TODO wrap clingo_theory_atoms_atom_guard()
    /// Get the aspif literal associated with the given theory atom.
    ///
    /// # Arguments
    ///
    /// * `atom` - id of the atom
    pub fn atom_literal(&mut self, Id(atom): Id) -> Option<Literal> {
        let mut lit = 0 as clingo_literal_t;
        match unsafe { clingo_theory_atoms_atom_literal(&mut self.0, atom, &mut lit) } {
            true => Some(Literal(lit)),
            false => None,
        }
    }
    //TODO wrap clingo_theory_atoms_atom_to_string_size()
    //TODO wrap clingo_theory_atoms_atom_to_string()
}
/// Iterator over theory atom ids.
///
/// Marked `UNSAFE_` because it snapshots the container size once at
/// construction and never revalidates it against the underlying `TheoryAtoms`.
pub struct UNSAFE_TheoryAtomsIterator {
    // current position in the id sequence
    count: usize,
    // number of theory atoms captured at construction time
    size: usize,
}
impl Iterator for UNSAFE_TheoryAtomsIterator {
    type Item = Id;
    /// Yields `Id(0)` through `Id(size - 1)` — one id per theory atom.
    fn next(&mut self) -> Option<Id> {
        // Bug fix: the previous version incremented `count` before the bounds
        // check, so it skipped Id(0) and produced only `size - 1` items;
        // theory atom ids are 0-based indices into the container.
        if self.count < self.size {
            let id = Id(self.count as u32);
            self.count += 1;
            Some(id)
        } else {
            None
        }
    }
}
impl UNSAFE_TheoryAtomsIterator {
    /// Build an id iterator from the current number of theory atoms.
    ///
    /// Panics if the size query fails.
    pub fn from(cta: &mut TheoryAtoms) -> UNSAFE_TheoryAtomsIterator {
        let size = cta.size().unwrap();
        UNSAFE_TheoryAtomsIterator { count: 0, size: size }
    }
}
/// Wrapper around a clingo model.
pub struct Model(clingo_model_t);
impl Model {
    /// Get the type of the model.
    pub fn model_type(&mut self) -> Option<clingo_model_type_t> {
        let mut mtype = 0 as clingo_model_type_t;
        if unsafe { clingo_model_type(&mut self.0, &mut mtype) } {
            Some(mtype)
        } else {
            None
        }
    }
    /// Get the running number of the model.
    pub fn number(&mut self) -> Option<u64> {
        let mut number = 0;
        if unsafe { clingo_model_number(&mut self.0, &mut number) } {
            Some(number)
        } else {
            None
        }
    }
    //NOTTODO clingo_model_symbols_size() is subsumed by symbols() below
    /// Get the symbols of the selected types in the model.
    ///
    /// **Note:** CSP assignments are represented using functions with name "$"
    /// where the first argument is the name of the CSP variable and the second one its
    /// value.
    ///
    /// # Arguments
    ///
    /// * `show` - which symbols to select
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if the size is too small
    pub fn symbols(
        &mut self,
        show: clingo_show_type_bitset_t,
    ) -> Result<Vec<Symbol>, &'static str> {
        let Model(ref mut model) = *self;
        // First query how many symbols the model will report.
        let mut size: usize = 0;
        if !unsafe { clingo_model_symbols_size(model, show, &mut size) } {
            return Err(error_message());
        }
        // Let clingo fill an uninitialized buffer, then adopt the elements.
        // The previous version read the data back through from_raw_parts on a
        // len-0 Vec and cloned it with to_owned(); set_len avoids the extra
        // copy and the aliasing through a *const-derived pointer.
        let mut symbols = Vec::<Symbol>::with_capacity(size);
        if unsafe {
            clingo_model_symbols(
                model,
                show,
                symbols.as_mut_ptr() as *mut clingo_symbol_t,
                size,
            )
        } {
            // SAFETY: on success clingo_model_symbols has written exactly
            // `size` symbols into the buffer we allocated above.
            unsafe { symbols.set_len(size) };
            Ok(symbols)
        } else {
            Err(error_message())
        }
    }
    //TODO wrap clingo_model_contains()
    //TODO wrap clingo_model_cost_size()
    //TODO wrap clingo_model_cost()
    //TODO wrap clingo_model_optimality_proven()
    //TODO wrap clingo_model_context()
}
/// Interface to add clauses during model enumeration.
pub struct SolveControl(clingo_solve_control_t);
impl SolveControl {
    /// Add a clause that applies to the current solving step during model
    /// enumeration.
    ///
    /// **Note:** The [`Propagator`](enum.Propagator.html) module provides a more sophisticated
    /// interface to add clauses - even on partial assignments.
    ///
    /// # Arguments
    ///
    /// * `clause` - array of literals representing the clause
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if adding the clause fails
    pub fn add_clause(&mut self, clause: &[Literal]) -> Result<(), &'static str> {
        let ok = unsafe {
            clingo_solve_control_add_clause(
                &mut self.0,
                clause.as_ptr() as *const clingo_literal_t,
                clause.len(),
            )
        };
        match ok {
            true => Ok(()),
            false => Err(error_message()),
        }
    }
}
/// Control object handed to a propagator during propagation.
pub struct PropagateControl(clingo_propagate_control_t);
impl PropagateControl {
    /// Get the id of the underlying solver thread.
    ///
    /// Thread ids are consecutive numbers starting with zero.
    pub fn thread_id(&mut self) -> u32 {
        unsafe { clingo_propagate_control_thread_id(&mut self.0) }
    }
    //TODO wrap clingo_propagate_control_assignment()
    /// Add the given clause to the solver.
    ///
    /// This method sets its result to false if the current propagation must be
    /// stopped for the solver to backtrack.
    ///
    /// **Attention:** No further calls on the control object or functions on the
    /// assignment should be called when the result of this method is false.
    ///
    /// # Arguments
    ///
    /// * `clause` - the clause to add
    /// * `type_` - the clause type determining its lifetime
    ///
    /// **Returns** result indicating whether propagation has to be stopped
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    pub fn add_clause(
        &mut self,
        clause: &[Literal],
        type_: ClauseType,
    ) -> Result<bool, &'static str> {
        let mut keep_going = false;
        let ok = unsafe {
            clingo_propagate_control_add_clause(
                &mut self.0,
                clause.as_ptr() as *const clingo_literal_t,
                clause.len(),
                type_ as clingo_clause_type_t,
                &mut keep_going,
            )
        };
        match ok {
            true => Ok(keep_going),
            false => Err(error_message()),
        }
    }
    /// Propagate implied literals (resulting from added clauses).
    ///
    /// This method sets its result to false if the current propagation must be
    /// stopped for the solver to backtrack.
    ///
    /// **Attention:** No further calls on the control object or functions on the
    /// assignment should be called when the result of this method is false.
    ///
    /// **Returns** result indicating whether propagation has to be stopped
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    pub fn propagate(&mut self) -> Result<bool, &'static str> {
        let mut keep_going = false;
        match unsafe { clingo_propagate_control_propagate(&mut self.0, &mut keep_going) } {
            true => Ok(keep_going),
            false => Err(error_message()),
        }
    }
}
pub struct PropagateInit(clingo_propagate_init_t);
impl PropagateInit {
/// Map the given program literal or condition id to its solver literal.
///
/// # Arguments
///
/// * `aspif_literal` - the aspif literal to map
///
/// **Returns** the corresponding solver literal
pub fn solver_literal(&mut self, Literal(aspif_literal): Literal) -> Option<Literal> {
let mut solver_literal = 0 as clingo_literal_t;
if unsafe {
clingo_propagate_init_solver_literal(&mut self.0, aspif_literal, &mut solver_literal)
} {
Some(Literal(solver_literal))
} else {
None
}
}
/// Add a watch for the solver literal in the given phase.
///
/// # Arguments
///
/// * `solver_literal` - the solver literal
pub fn add_watch(&mut self, Literal(solver_literal): Literal) -> Option<()> {
if unsafe { clingo_propagate_init_add_watch(&mut self.0, solver_literal) } {
Some(())
} else {
None
}
}
/// Get an object to inspect the symbolic atoms.
pub fn symbolic_atoms<'a>(&mut self) -> Option<&'a mut SymbolicAtoms> {
let mut atoms_ptr = std::ptr::null_mut();
if unsafe { clingo_propagate_init_symbolic_atoms(&mut self.0, &mut atoms_ptr) } {
unsafe { (atoms_ptr as *mut SymbolicAtoms).as_mut() }
} else {
None
}
}
//TODO pub fn clingo_propagate_init_theory_atoms(init: &mut PropagateInit,
// atoms: *mut *mut TheoryAtoms)
// -> bool;
/// Get the number of threads used in subsequent solving.
/// **See:** [`PropagateControl::thread_id()`](struct.PropagateControl.html#method.thread_id)
pub fn number_of_threads(&mut self) -> usize {
(unsafe { clingo_propagate_init_number_of_threads(&mut self.0) } as usize)
}
}
/// Handle for an ongoing (possibly asynchronous) search.
pub struct SolveHandle(clingo_solve_handle);
impl SolveHandle {
    /// Get the next solve result.
    ///
    /// Blocks until the result is ready.
    /// When yielding partial solve results can be obtained, i.e., when a model
    /// is ready, the result will be satisfiable but neither the search
    /// exhausted nor the optimality proven.
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if solving fails
    pub fn get(&mut self) -> Result<clingo_solve_result_bitset_t, &'static str> {
        let mut result = 0;
        match unsafe { clingo_solve_handle_get(&mut self.0, &mut result) } {
            true => Ok(result),
            false => Err(error_message()),
        }
    }
    /// Get the next model (or zero if there are no more models).
    /// (it is NULL if there are no more models)
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if solving fails
    pub fn model(&mut self) -> Result<&mut Model, &'static str> {
        let SolveHandle(ref mut handle) = *self;
        let mut model = std::ptr::null_mut() as *mut clingo_model_t;
        if !unsafe { clingo_solve_handle_model(handle, &mut model) } {
            return Err(error_message());
        }
        unsafe { (model as *mut Model).as_mut() }
            .ok_or("Rust binding failed to dereference pointer to clingo model")
    }
    /// Discards the last model and starts the search for the next one.
    ///
    /// If the search has been started asynchronously, this function continues
    /// the search in the background.
    ///
    /// **Note:** This function does not block.
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if solving fails
    pub fn resume(&mut self) -> Result<(), &'static str> {
        let SolveHandle(ref mut handle) = *self;
        match unsafe { clingo_solve_handle_resume(handle) } {
            true => Ok(()),
            false => Err(error_message()),
        }
    }
    /// Stops the running search and releases the handle.
    ///
    /// Blocks until the search is stopped (as if an implicit cancel was called
    /// before the handle is released).
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if solving fails
    pub fn close(&mut self) -> Result<(), &'static str> {
        let SolveHandle(ref mut handle) = *self;
        match unsafe { clingo_solve_handle_close(handle) } {
            true => Ok(()),
            false => Err(error_message()),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Pins the clingo library version this binding was written against;
    /// update when bumping the clingo_sys dependency.
    #[test]
    fn version_test() {
        let (ma, mi, re) = version();
        assert_eq!(ma, 5);
        assert_eq!(mi, 2);
        assert_eq!(re, 2);
    }
    /// Checks symbol construction round-trips. (Renamed from the misleading
    /// `parse_program_test`: no program is parsed here.)
    #[test]
    fn symbol_creation_test() {
        let mut sym = create_number(42);
        assert_eq!(42, sym.number().unwrap());
        sym = create_infimum();
        assert_eq!(SymbolType::Infimum, sym.get_type().unwrap());
    }
}
// add method `external` to Backend
#![feature(ptr_internals)]
extern crate clingo_sys;
extern crate libc;
use std::mem;
use std::ptr::Unique;
use std::ffi::CStr;
use std::ffi::CString;
use libc::c_int;
use libc::c_char;
use clingo_sys::*;
pub use clingo_sys::{clingo_ast_statement_t, clingo_ast_term_type_t, clingo_logger_t};
pub use clingo_sys::clingo_show_type_bitset_t;
pub use clingo_sys::clingo_solve_mode_bitset_t;
/// Error codes, mirroring the `clingo_error_*` constants from `clingo_sys`.
#[derive(Debug, Copy, Clone)]
pub enum Error {
    Success = clingo_error_clingo_error_success as isize,
    Runtime = clingo_error_clingo_error_runtime as isize,
    Logic = clingo_error_clingo_error_logic as isize,
    BadAlloc = clingo_error_clingo_error_bad_alloc as isize,
    Unknown = clingo_error_clingo_error_unknown as isize,
}
/// Solve-mode flags, mirroring the `clingo_solve_mode_*` constants.
#[derive(Debug, Copy, Clone)]
pub enum SolveMode {
    Async = clingo_solve_mode_clingo_solve_mode_async as isize,
    Yield = clingo_solve_mode_clingo_solve_mode_yield as isize,
}
/// Symbol-selection flags for models, mirroring the `clingo_show_type_*` constants.
#[derive(Debug, Copy, Clone)]
pub enum ShowType {
    CSP = clingo_show_type_clingo_show_type_csp as isize,
    Shown = clingo_show_type_clingo_show_type_shown as isize,
    Atoms = clingo_show_type_clingo_show_type_atoms as isize,
    Terms = clingo_show_type_clingo_show_type_terms as isize,
    Extra = clingo_show_type_clingo_show_type_extra as isize,
    All = clingo_show_type_clingo_show_type_all as isize,
    Complement = clingo_show_type_clingo_show_type_complement as isize,
}
/// Three-valued truth values, mirroring the `clingo_truth_value_*` constants.
#[derive(Debug, Copy, Clone)]
pub enum TruthValue {
    Free = clingo_truth_value_clingo_truth_value_free as isize,
    True = clingo_truth_value_clingo_truth_value_true as isize,
    False = clingo_truth_value_clingo_truth_value_false as isize,
}
/// AST statement discriminants, mirroring the `clingo_ast_statement_type_*` constants.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum AstStatementType {
    Rule = clingo_ast_statement_type_clingo_ast_statement_type_rule as isize,
    Const = clingo_ast_statement_type_clingo_ast_statement_type_const as isize,
    ShowSignature = clingo_ast_statement_type_clingo_ast_statement_type_show_signature as isize,
    ShowTerm = clingo_ast_statement_type_clingo_ast_statement_type_show_term as isize,
    Minimize = clingo_ast_statement_type_clingo_ast_statement_type_minimize as isize,
    Script = clingo_ast_statement_type_clingo_ast_statement_type_script as isize,
    Program = clingo_ast_statement_type_clingo_ast_statement_type_program as isize,
    External = clingo_ast_statement_type_clingo_ast_statement_type_external as isize,
    Edge = clingo_ast_statement_type_clingo_ast_statement_type_edge as isize,
    Heuristic = clingo_ast_statement_type_clingo_ast_statement_type_heuristic as isize,
    ProjectAtom = clingo_ast_statement_type_clingo_ast_statement_type_project_atom as isize,
    ProjectAtomSignature =
        clingo_ast_statement_type_clingo_ast_statement_type_project_atom_signature as isize,
    TheoryDefinition =
        clingo_ast_statement_type_clingo_ast_statement_type_theory_definition as isize,
}
/// Sign attached to an AST literal, mirroring the `clingo_ast_sign_*` constants.
#[derive(Debug, Copy, Clone)]
pub enum AstSign {
    None = clingo_ast_sign_clingo_ast_sign_none as isize,
    Negation = clingo_ast_sign_clingo_ast_sign_negation as isize,
    DoubleNegation = clingo_ast_sign_clingo_ast_sign_double_negation as isize,
}
/// AST literal discriminants, mirroring the `clingo_ast_literal_type_*` constants.
#[derive(Debug, Copy, Clone)]
pub enum AstLiteralType {
    Boolean = clingo_ast_literal_type_clingo_ast_literal_type_boolean as isize,
    Symbolic = clingo_ast_literal_type_clingo_ast_literal_type_symbolic as isize,
    Comparison = clingo_ast_literal_type_clingo_ast_literal_type_comparison as isize,
    CSP = clingo_ast_literal_type_clingo_ast_literal_type_csp as isize,
}
/// AST body-literal discriminants, mirroring the `clingo_ast_body_literal_type_*` constants.
#[derive(Debug, Copy, Clone)]
pub enum AstBodyLiteralType {
    Literal = clingo_ast_body_literal_type_clingo_ast_body_literal_type_literal as isize,
    Conditional = clingo_ast_body_literal_type_clingo_ast_body_literal_type_conditional as isize,
    Aggregate = clingo_ast_body_literal_type_clingo_ast_body_literal_type_aggregate as isize,
    BodyAggregate =
        clingo_ast_body_literal_type_clingo_ast_body_literal_type_body_aggregate as isize,
    TheoryAtom = clingo_ast_body_literal_type_clingo_ast_body_literal_type_theory_atom as isize,
    Disjoint = clingo_ast_body_literal_type_clingo_ast_body_literal_type_disjoint as isize,
}
/// Clause lifetimes, mirroring the `clingo_clause_type_*` constants.
#[derive(Debug, Copy, Clone)]
pub enum ClauseType {
    Learnt = clingo_clause_type_clingo_clause_type_learnt as isize,
    Static = clingo_clause_type_clingo_clause_type_static as isize,
    Volatile = clingo_clause_type_clingo_clause_type_volatile as isize,
    VolatileStatic = clingo_clause_type_clingo_clause_type_volatile_static as isize,
}
/// Solve-event kinds, mirroring the `clingo_solve_event_type_*` constants.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum SolveEventType {
    Model = clingo_solve_event_type_clingo_solve_event_type_model as isize,
    Finish = clingo_solve_event_type_clingo_solve_event_type_finish as isize,
}
/// Statistics entry kinds, mirroring the `clingo_statistics_type_*` constants.
#[derive(Debug, Copy, Clone)]
pub enum StatisticsType {
    Empty = clingo_statistics_type_clingo_statistics_type_empty as isize,
    Value = clingo_statistics_type_clingo_statistics_type_value as isize,
    Array = clingo_statistics_type_clingo_statistics_type_array as isize,
    Map = clingo_statistics_type_clingo_statistics_type_map as isize,
}
/// Symbol kinds, mirroring the `clingo_symbol_type_*` constants.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum SymbolType {
    Infimum = clingo_symbol_type_clingo_symbol_type_infimum as isize,
    Number = clingo_symbol_type_clingo_symbol_type_number as isize,
    String = clingo_symbol_type_clingo_symbol_type_string as isize,
    Function = clingo_symbol_type_clingo_symbol_type_function as isize,
    Supremum = clingo_symbol_type_clingo_symbol_type_supremum as isize,
}
/// Warning codes, mirroring the `clingo_warning_*` constants.
#[derive(Debug, Copy, Clone)]
pub enum Warning {
    OperationUndefined = clingo_warning_clingo_warning_operation_undefined as isize,
    RuntimeError = clingo_warning_clingo_warning_runtime_error as isize,
    AtomUndefined = clingo_warning_clingo_warning_atom_undefined as isize,
    FileIncluded = clingo_warning_clingo_warning_file_included as isize,
    VariableUnbound = clingo_warning_clingo_warning_variable_unbounded as isize,
    GlobalVariable = clingo_warning_clingo_warning_global_variable as isize,
    Other = clingo_warning_clingo_warning_other as isize,
}
/// Configuration entry kinds, mirroring the `clingo_configuration_type_*` constants.
#[derive(Debug, Copy, Clone)]
pub enum ConfigurationType {
    Value = clingo_configuration_type_clingo_configuration_type_value as isize,
    Array = clingo_configuration_type_clingo_configuration_type_array as isize,
    Map = clingo_configuration_type_clingo_configuration_type_map as isize,
}
/// External-atom assignments, mirroring the `clingo_external_type_*` constants.
#[derive(Debug, Copy, Clone)]
pub enum ExternalType {
    Free = clingo_external_type_clingo_external_type_free as isize,
    True = clingo_external_type_clingo_external_type_true as isize,
    False = clingo_external_type_clingo_external_type_false as isize,
    Release = clingo_external_type_clingo_external_type_release as isize,
}
/// Raw C signature of the solve-event callback expected by clingo.
type SolveEventCallback = unsafe extern "C" fn(
    type_: clingo_solve_event_type_t,
    event: *mut ::std::os::raw::c_void,
    data: *mut ::std::os::raw::c_void,
    goon: *mut bool,
) -> bool;
/// High-level solve-event handler; `unsafe_solve_callback` adapts it to the C
/// callback signature, translating the raw event code and user-data pointer.
pub trait SolveEventHandler<T> {
    /// Invoked for every solve event; the boolean return is forwarded to clingo.
    fn on_solve_event(type_: SolveEventType, data: &mut T, goon: &mut bool) -> bool;
    #[doc(hidden)]
    unsafe extern "C" fn unsafe_solve_callback(
        type_: clingo_solve_event_type_t,
        event: *mut ::std::os::raw::c_void,
        data_: *mut ::std::os::raw::c_void,
        goon_: *mut bool,
    ) -> bool {
        // TODO assert!(!event.is_null());
        assert!(!data_.is_null());
        assert!(!goon_.is_null());
        // Map the raw event code onto the Rust enum; an unknown code is a
        // binding bug, hence the panic.
        let event_type = match type_ {
            clingo_solve_event_type_clingo_solve_event_type_model => SolveEventType::Model,
            clingo_solve_event_type_clingo_solve_event_type_finish => SolveEventType::Finish,
            _ => panic!("Rust binding failed to match clingo solve event type"),
        };
        // The caller guarantees data_ points at a valid T (checked non-null above).
        let data = (data_ as *mut T).as_mut().unwrap();
        let goon = goon_.as_mut().unwrap();
        Self::on_solve_event(event_type, data, goon)
    }
}
/// Raw C signature of the per-statement callback used when parsing programs.
type AstCallback =
    unsafe extern "C" fn(arg1: *const clingo_ast_statement_t, arg2: *mut ::std::os::raw::c_void)
        -> bool;
/// High-level AST statement handler; `unsafe_ast_callback` adapts it to the C
/// callback signature.
pub trait AstStatementHandler<T> {
    /// Invoked for every parsed statement; the boolean return is forwarded to clingo.
    fn on_statement(arg1: &AstStatement, arg2: &mut T) -> bool;
    #[doc(hidden)]
    unsafe extern "C" fn unsafe_ast_callback(
        stm_: *const clingo_ast_statement_t,
        data_: *mut ::std::os::raw::c_void,
    ) -> bool {
        assert!(!stm_.is_null());
        assert!(!data_.is_null());
        // Reinterpret the raw pointers as the wrapper types (checked non-null above).
        let stm = (stm_ as *const AstStatement).as_ref().unwrap();
        let data = (data_ as *mut T).as_mut().unwrap();
        Self::on_statement(stm, data)
    }
}
/// Raw C signature of the message logger callback expected by clingo.
type LoggingCallback = unsafe extern "C" fn(
    code: clingo_warning_t,
    message: *const ::std::os::raw::c_char,
    data: *mut ::std::os::raw::c_void,
);
/// High-level logger; `unsafe_logging_callback` adapts it to the C callback
/// signature, translating the warning code and C message string.
pub trait Logger<T> {
    /// Invoked for every message clingo emits.
    fn log(code: Warning, message: &str, data: &mut T);
    #[doc(hidden)]
    unsafe extern "C" fn unsafe_logging_callback(
        code_: clingo_warning_t,
        message_: *const ::std::os::raw::c_char,
        data_: *mut ::std::os::raw::c_void,
    ) {
        assert!(!message_.is_null());
        assert!(!data_.is_null());
        // Map the raw warning code onto the Rust enum; an unknown code is a
        // binding bug, hence the panic.
        let warning = match code_ as u32 {
            clingo_warning_clingo_warning_atom_undefined => Warning::AtomUndefined,
            clingo_warning_clingo_warning_file_included => Warning::FileIncluded,
            clingo_warning_clingo_warning_global_variable => Warning::GlobalVariable,
            clingo_warning_clingo_warning_operation_undefined => Warning::OperationUndefined,
            clingo_warning_clingo_warning_other => Warning::Other,
            clingo_warning_clingo_warning_runtime_error => Warning::RuntimeError,
            clingo_warning_clingo_warning_variable_unbounded => Warning::VariableUnbound,
            _ => panic!("Rust binding failed to match clingo warning"),
        };
        // Panics if the message is not valid UTF-8.
        let c_str = CStr::from_ptr(message_);
        let message = c_str.to_str().unwrap();
        let data = (data_ as *mut T).as_mut().unwrap();
        Self::log(warning, message, data)
    }
}
/// Raw C signature of the callback invoked for external functions during grounding.
type GroundCallback = unsafe extern "C" fn(
    location: *const clingo_location_t,
    name: *const ::std::os::raw::c_char,
    arguments: *const clingo_symbol_t,
    arguments_size: usize,
    data: *mut ::std::os::raw::c_void,
    symbol_callback: clingo_symbol_callback_t,
    symbol_callback_data: *mut ::std::os::raw::c_void,
) -> bool;
/// High-level grounding-event handler; `unsafe_ground_callback` adapts it to
/// the C callback signature. The symbol callback pair is passed through raw.
pub trait GroundEventHandler<T> {
    /// Invoked for every external function encountered during grounding;
    /// the boolean return is forwarded to clingo.
    fn on_ground_event(
        location: &Location,
        name: &str,
        arguments: &[Symbol],
        data: &mut T,
        symbol_callback: clingo_symbol_callback_t,
        symbol_callback_data: *mut ::std::os::raw::c_void,
    ) -> bool;
    #[doc(hidden)]
    unsafe extern "C" fn unsafe_ground_callback(
        location_: *const clingo_location_t,
        name_: *const ::std::os::raw::c_char,
        arguments_: *const clingo_symbol_t,
        arguments_size: usize,
        data_: *mut ::std::os::raw::c_void,
        symbol_callback: clingo_symbol_callback_t,
        symbol_callback_data: *mut ::std::os::raw::c_void,
        //TODO wrap symbol call back
    ) -> bool {
        assert!(!location_.is_null());
        assert!(!name_.is_null());
        assert!(!arguments_.is_null());
        assert!(!data_.is_null());
        assert!(!symbol_callback_data.is_null());
        // Reinterpret the raw pointers as the wrapper types (checked non-null above).
        let location = (location_ as *const Location).as_ref().unwrap();
        // Panics if the name is not valid UTF-8.
        let c_str = CStr::from_ptr(name_);
        let name = c_str.to_str().unwrap();
        let arguments = std::slice::from_raw_parts(arguments_ as *const Symbol, arguments_size);
        let data = (data_ as *mut T).as_mut().unwrap();
        Self::on_ground_event(
            location,
            name,
            arguments,
            data,
            symbol_callback,
            symbol_callback_data,
        )
    }
}
/// Newtype wrapper around `clingo_symbolic_literal_t`.
#[derive(Debug, Copy, Clone)]
pub struct SymbolicLiteral(clingo_symbolic_literal_t);
/// Newtype wrapper around a signed program (aspif) literal.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct Literal(clingo_literal_t);
impl Literal {
    /// Return the complementary literal (sign flipped).
    pub fn negate(&self) -> Literal {
        let Literal(lit) = *self;
        Literal(-lit)
    }
    /// Reinterpret an atom id as a literal without any validation.
    pub fn UNSAFE_from(Atom(atom): Atom) -> Literal {
        Literal(atom as clingo_literal_t)
    }
    /// Raw integer value of the literal.
    pub fn get_integer(&self) -> i32 {
        self.0
    }
}
// NOTE(review): this wraps clingo_literal_t, but the name suggests it should
// wrap clingo's weighted-literal struct (literal + weight) — confirm intent.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct WeightedLiteral(clingo_literal_t);
//TODO check impl WeightedLiteral {
//    pub fn negate(&self) -> Literal {
//        Literal(-(self.0))
//    }
//    pub fn UNSAFE_from(Atom(atom): Atom) -> Literal {
//        Literal(atom as clingo_literal_t)
//    }
//    pub fn get_integer(&self) -> i32 {
//        self.0
//    }
//    }
/// Newtype wrapper around a program atom id.
#[derive(Debug, Copy, Clone)]
pub struct Atom(clingo_atom_t);
/// Newtype wrapper around a generic clingo id (e.g. theory term/atom ids).
#[derive(Debug, Copy, Clone)]
pub struct Id(clingo_id_t);
impl Id {
    /// Raw integer value of the id.
    pub fn get_integer(&self) -> u32 {
        self.0
    }
}
/// Newtype wrapper around a source-code location.
#[derive(Debug, Copy, Clone)]
pub struct Location(clingo_location);
/// Newtype wrapper around a clingo symbol handle.
#[derive(Debug, Clone)]
pub struct Symbol(clingo_symbol_t);
impl PartialEq for Symbol {
    // Equality is delegated to clingo rather than comparing the raw handles.
    fn eq(&self, other: &Symbol) -> bool {
        unsafe { clingo_symbol_is_equal_to(self.0, other.0) }
    }
}
// Relies on clingo's symbol equality being a full equivalence relation.
impl Eq for Symbol {}
/// Construct a symbol representing a number.
// TODO replace c_int with u32 ?
pub fn create_number(number: c_int) -> Symbol {
    let mut sym = 0 as clingo_symbol_t;
    unsafe { clingo_symbol_create_number(number, &mut sym) };
    Symbol(sym)
}
/// Construct a symbol representing `#sup`.
pub fn create_supremum() -> Symbol {
    let mut sym = 0 as clingo_symbol_t;
    unsafe { clingo_symbol_create_supremum(&mut sym) };
    Symbol(sym)
}
/// Construct a symbol representing `#inf`.
pub fn create_infimum() -> Symbol {
    let mut sym = 0 as clingo_symbol_t;
    unsafe { clingo_symbol_create_infimum(&mut sym) };
    Symbol(sym)
}
/// Construct a symbol representing a string.
///
/// # Errors:
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn create_string(string: &str) -> Result<Symbol, &'static str> {
    let c_str = CString::new(string).unwrap();
    let mut sym = 0 as clingo_symbol_t;
    match unsafe { clingo_symbol_create_string(c_str.as_ptr(), &mut sym) } {
        true => Ok(Symbol(sym)),
        false => Err(error_message()),
    }
}
/// Construct a symbol representing an id.
///
/// **Note:** This is just a shortcut for `create_function()` with
/// empty arguments.
///
/// # Arguments
///
/// * `name` - the name of the symbol
/// * `positive` - whether the symbol has a classical negation sign
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn create_id(name: &str, positive: bool) -> Result<Symbol, &'static str> {
    let c_name = CString::new(name).unwrap();
    let mut sym = 0 as clingo_symbol_t;
    match unsafe { clingo_symbol_create_id(c_name.as_ptr(), positive, &mut sym) } {
        true => Ok(Symbol(sym)),
        false => Err(error_message()),
    }
}
/// Construct a symbol representing a function or tuple.
///
/// **Note:** To create tuples, the empty string has to be used as name.
///
/// # Arguments
///
/// * `name` - the name of the function
/// * `arguments` - the arguments of the function
/// * `positive` - whether the symbol has a classical negation sign
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn create_function(
    name: &str,
    arguments: &[Symbol],
    positive: bool,
) -> Result<Symbol, &'static str> {
    let c_name = CString::new(name).unwrap();
    let mut sym = 0 as clingo_symbol_t;
    let ok = unsafe {
        clingo_symbol_create_function(
            c_name.as_ptr(),
            arguments.as_ptr() as *const clingo_symbol_t,
            arguments.len(),
            positive,
            &mut sym,
        )
    };
    match ok {
        true => Ok(Symbol(sym)),
        false => Err(error_message()),
    }
}
impl Symbol {
    /// Get the number of a symbol.
    ///
    /// # Errors
    ///
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if symbol is not of type `SymbolType::Number`
    pub fn number(&self) -> Result<i32, &'static str> {
        let mut number = 0;
        if unsafe { clingo_symbol_number(self.0, &mut number) } {
            Ok(number)
        } else {
            Err(error_message())
        }
    }
    /// Get the name of a symbol.
    ///
    /// # Errors
    ///
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if symbol is not of type `SymbolType::Function`
    pub fn name(&self) -> Result<&str, &'static str> {
        let mut char_ptr = std::ptr::null() as *const c_char;
        if unsafe { clingo_symbol_name(self.0, &mut char_ptr) } {
            // The C string is owned by clingo's internal symbol store.
            let c_str = unsafe { CStr::from_ptr(char_ptr) };
            Ok(c_str.to_str().unwrap())
        } else {
            Err(error_message())
        }
    }
    /// Get the string of a symbol.
    ///
    /// # Errors
    ///
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if symbol is not of type `SymbolType::String`
    pub fn string(&self) -> Result<&str, &'static str> {
        let mut char_ptr = std::ptr::null() as *const c_char;
        if unsafe { clingo_symbol_string(self.0, &mut char_ptr) } {
            let c_str = unsafe { CStr::from_ptr(char_ptr) };
            Ok(c_str.to_str().unwrap())
        } else {
            Err(error_message())
        }
    }
    /// Check if a function is positive (does not have a sign).
    ///
    /// # Errors
    ///
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if symbol is not of type `SymbolType::Function`
    pub fn is_positive(&self) -> Result<bool, &'static str> {
        let mut positive = false;
        if unsafe { clingo_symbol_is_positive(self.0, &mut positive) } {
            Ok(positive)
        } else {
            Err(error_message())
        }
    }
    /// Check if a function is negative (has a sign).
    ///
    /// # Errors
    ///
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if symbol is not of type `SymbolType::Function`
    pub fn is_negative(&self) -> Result<bool, &'static str> {
        let mut negative = false;
        if unsafe { clingo_symbol_is_negative(self.0, &mut negative) } {
            Ok(negative)
        } else {
            Err(error_message())
        }
    }
    /// Get the arguments of a symbol.
    ///
    /// # Errors
    ///
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if symbol is not of type `SymbolType::Function`
    pub fn arguments(&self) -> Result<Vec<Symbol>, &'static str> {
        let mut symbol_ptr = std::ptr::null() as *const clingo_symbol_t;
        let mut size: usize = 0;
        if unsafe { clingo_symbol_arguments(self.0, &mut symbol_ptr, &mut size) } {
            // Copy the clingo-owned array into an owned Vec of wrappers.
            let mut symbols = Vec::<Symbol>::with_capacity(size);
            for i in 0..size {
                let nsymbol = unsafe { *symbol_ptr.offset(i as isize) };
                symbols.push(Symbol(nsymbol));
            }
            Ok(symbols)
        } else {
            Err(error_message())
        }
    }
    /// Get the type of a symbol.
    ///
    /// # Errors
    ///
    /// - may fail to match the clingo symbol type
    //TODO maybe unnecesary function in Rust API?
    pub fn get_type(&self) -> Result<SymbolType, &'static str> {
        let stype = unsafe { clingo_symbol_type(self.0) } as u32;
        match stype {
            clingo_symbol_type_clingo_symbol_type_infimum => Ok(SymbolType::Infimum),
            clingo_symbol_type_clingo_symbol_type_number => Ok(SymbolType::Number),
            clingo_symbol_type_clingo_symbol_type_string => Ok(SymbolType::String),
            clingo_symbol_type_clingo_symbol_type_function => Ok(SymbolType::Function),
            clingo_symbol_type_clingo_symbol_type_supremum => Ok(SymbolType::Supremum),
            _ => Err("Rust binding failed to match clingo symbol type"),
        }
    }
    /// Get the string representation of a symbol.
    ///
    /// Returns `None` if the size query or the rendering call fails, or if the
    /// rendered text is not valid UTF-8.
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    ///
    pub fn to_string(&self) -> Option<String> {
        let mut size: usize = 0;
        if !unsafe { clingo_symbol_to_string_size(self.0, &mut size) } {
            return None;
        }
        // Let clingo render into a plain byte buffer. The previous
        // implementation wrote through `CString::as_ptr() as *mut c_char`,
        // which mutates memory behind a shared pointer (undefined behavior)
        // and kept the trailing NUL in the resulting String.
        let mut buf = vec![0u8; size];
        if !unsafe { clingo_symbol_to_string(self.0, buf.as_mut_ptr() as *mut c_char, size) } {
            return None;
        }
        // `size` includes the terminating NUL byte; keep only the text before
        // the first NUL.
        let len = buf.iter().position(|&b| b == 0).unwrap_or(buf.len());
        buf.truncate(len);
        String::from_utf8(buf).ok()
    }
    /// Check if a symbol is less than another symbol.
    ///
    /// Symbols are first compared by type. If the types are equal, the values are
    /// compared (where strings are compared using strcmp). Functions are first
    /// compared by signature and then lexicographically by arguments.
    ///
    /// # Arguments
    ///
    /// * `a` - first symbol
    /// * `b` - second symbol
    ///
    /// **Returns** whether a < b
    pub fn is_less_than(&self, other: &Symbol) -> bool {
        unsafe { clingo_symbol_is_less_than(self.0, other.0) }
    }
    /// Calculate a hash code of a symbol.
    pub fn hash(&self) -> usize {
        unsafe { clingo_symbol_hash(self.0) }
    }
}
// struct MaLogger;
// impl Logger<u32> for MaLogger {
//
// fn log(code: Warning, message: &str, data: &mut u32){
// println!("log: {}",message);
// println!("warn: {:?}",code);
// }
// }
/// Parse the given program and return an abstract syntax tree for each statement via a callback.
///
/// # Arguments
///
/// * `program` - the program in gringo syntax
/// * `callback` - the callback reporting statements
/// * `callback_data` - user data for the callback
///
/// # Errors
///
/// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if parsing fails
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn parse_program<D, T: AstStatementHandler<D>>(
    program_: &str,
    _callback: &T,
    callback_data: &mut D,
) -> Result<(), &'static str> {
    let source = CString::new(program_).unwrap();
    let data_ptr = callback_data as *mut D as *mut ::std::os::raw::c_void;
    // No logger is installed and the message limit is zero, so parser
    // diagnostics are discarded.
    let ok = unsafe {
        clingo_parse_program(
            source.as_ptr(),
            Some(T::unsafe_ast_callback as AstCallback),
            data_ptr,
            None,
            std::ptr::null_mut(),
            0,
        )
    };
    if ok {
        Ok(())
    } else {
        Err(error_message())
    }
}
/// Parse the given program and return an abstract syntax tree for each statement via a callback.
///
/// # Arguments
///
/// * `program` - the program in gringo syntax
/// * `callback` - the callback reporting statements
/// * `callback_data` - user data for the callback
/// * `logger` - callback to report messages during parsing
/// * `logger_data` - user data for the logger
/// * `message_limit` - the maximum number of times the logger is called
///
/// # Errors
///
/// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if parsing fails
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn parse_program_with_logger<CD, C: AstStatementHandler<CD>, LD, L: Logger<LD>>(
    program_: &str,
    _callback: &C,
    cdata_: &mut CD,
    _logger: &L,
    ldata_: &mut LD,
    message_limit: u32,
) -> Result<(), &'static str> {
    let source = CString::new(program_).unwrap();
    // Erase the user-data types for the C callbacks.
    let cb_data = (cdata_ as *mut CD) as *mut ::std::os::raw::c_void;
    let log_data = (ldata_ as *mut LD) as *mut ::std::os::raw::c_void;
    let ok = unsafe {
        clingo_parse_program(
            source.as_ptr(),
            Some(C::unsafe_ast_callback as AstCallback),
            cb_data,
            Some(L::unsafe_logging_callback as LoggingCallback),
            log_data,
            message_limit,
        )
    };
    if ok {
        Ok(())
    } else {
        Err(error_message())
    }
}
pub fn create_location(
begin_line: usize,
end_line: usize,
begin_column: usize,
end_column: usize,
begin_file_: &str,
end_file_: &str,
) -> Location {
let begin_file = CString::new(begin_file_).unwrap();
let end_file = CString::new(end_file_).unwrap();
let loc = clingo_location {
begin_line: begin_line,
end_line: end_line,
begin_column: begin_column,
end_column: end_column,
begin_file: begin_file.as_ptr(),
end_file: end_file.as_ptr(),
};
Location(loc)
}
/// Obtain the clingo version.
///
/// `(major version, minor version, revision number)`
pub fn version() -> (i32, i32, i32) {
    let (mut major, mut minor, mut revision) = (0, 0, 0);
    unsafe { clingo_version(&mut major, &mut minor, &mut revision) };
    (major, minor, revision)
}
/// A program part: a named, parameterized block of a logic program, as passed
/// to `Control::ground()`.
pub struct Part<'a> {
// owned, NUL-terminated copy of the part name
name: CString,
// borrowed parameter symbols; the raw `clingo_part` built by `from()` points
// into this slice
params: &'a [Symbol],
}
impl<'a> Part<'a> {
    /// Create a program part from a name and its parameter symbols.
    ///
    /// Panics if `name` contains an interior NUL byte.
    pub fn new(name: &str, params: &'a [Symbol]) -> Part<'a> {
        let owned_name = CString::new(name).unwrap();
        Part {
            name: owned_name,
            params: params,
        }
    }
    /// Build the raw `clingo_part` view of this part.
    ///
    /// The returned struct borrows `self.name` and `self.params` via raw
    /// pointers, so it must not outlive this `Part`.
    fn from(&self) -> clingo_part {
        let params = self.params;
        clingo_part {
            name: self.name.as_ptr(),
            params: params.as_ptr() as *const clingo_symbol_t,
            size: params.len(),
        }
    }
}
/// Get the last error code set by a clingo API call.
///
/// **Note:** Each thread has its own local error code.
pub fn error() -> Error {
    // Widen the raw error code so it matches the bindgen constants.
    let raw = unsafe { clingo_error_code() } as u32;
    match raw {
        clingo_error_clingo_error_success => Error::Success,
        clingo_error_clingo_error_runtime => Error::Runtime,
        clingo_error_clingo_error_logic => Error::Logic,
        clingo_error_clingo_error_bad_alloc => Error::BadAlloc,
        clingo_error_clingo_error_unknown => Error::Unknown,
        _ => panic!("Rust binding failed to match clingo error"),
    }
}
/// Get the last error message set if an API call fails.
///
/// Returns the empty string when no message is available.
///
/// **Note:** Each thread has its own local error message.
pub fn error_message() -> &'static str {
    let msg_ptr: *const c_char = unsafe { clingo_error_message() };
    if msg_ptr.is_null() {
        return "";
    }
    unsafe { CStr::from_ptr(msg_ptr) }.to_str().unwrap()
}
/// Set a custom error code and message in the active thread.
///
/// Panics if `message` contains an interior NUL byte.
pub fn set_error(code: Error, message: &str) {
    let c_message = CString::new(message).unwrap();
    unsafe { clingo_set_error(code as clingo_error_t, c_message.as_ptr()) }
}
/// Newtype over the raw clingo propagator struct (a table of C callbacks),
/// produced by `PropagatorBuilder::new()`.
pub struct Propagator(clingo_propagator_t);
/// Trait for implementing a custom clingo propagator over user data `T`.
///
/// Override any of `init`/`propagate`/`undo`/`check`; the defaults do nothing
/// and return `true` (meaning "continue"). `new()` packs the `unsafe_*`
/// trampolines into a raw `clingo_propagator_t` callback table.
pub trait PropagatorBuilder<T> {
fn init(_init: &mut PropagateInit, _data: &mut T) -> bool {
true
}
fn propagate(_control: &mut PropagateControl, _changes: &[Literal], _data: &mut T) -> bool {
true
}
fn undo(_control: &mut PropagateControl, _changes: &[Literal], _data: &mut T) -> bool {
true
}
fn check(_control: &mut PropagateControl, _data: &mut T) -> bool {
true
}
/// Get a Propagator
fn new() -> Propagator {
let prop = clingo_propagator_t {
init: Some(Self::unsafe_init),
propagate: Some(Self::unsafe_propagate),
undo: Some(Self::unsafe_undo),
check: Some(Self::unsafe_check),
};
Propagator(prop)
}
// C trampoline: casts the raw pointers back to the wrapper types and
// forwards to the safe trait method. Assumes `data` points to a valid `T`
// registered alongside this propagator.
#[doc(hidden)]
unsafe extern "C" fn unsafe_init(
init_: *mut clingo_propagate_init_t,
data: *mut ::std::os::raw::c_void,
) -> bool {
assert!(!init_.is_null());
assert!(!data.is_null());
let init = (init_ as *mut PropagateInit).as_mut().unwrap();
let propagator = (data as *mut T).as_mut().unwrap();
Self::init(init, propagator)
}
// C trampoline for `propagate`. NOTE(review): the `!changes_.is_null()`
// assert would fire if clingo ever passed a null pointer with `size == 0`
// — verify against the clingo API contract.
#[doc(hidden)]
unsafe extern "C" fn unsafe_propagate(
control_: *mut clingo_propagate_control_t,
changes_: *const clingo_literal_t,
size: usize,
data: *mut ::std::os::raw::c_void,
) -> bool {
assert!(!control_.is_null());
assert!(!changes_.is_null());
assert!(!data.is_null());
let control = (control_ as *mut PropagateControl).as_mut().unwrap();
let changes = std::slice::from_raw_parts(changes_ as *const Literal, size);
let propagator = (data as *mut T).as_mut().unwrap();
Self::propagate(control, changes, propagator)
}
// C trampoline for `undo`; same pointer assumptions as `unsafe_propagate`.
#[doc(hidden)]
unsafe extern "C" fn unsafe_undo(
control_: *mut clingo_propagate_control_t,
changes_: *const clingo_literal_t,
size: usize,
data: *mut ::std::os::raw::c_void,
) -> bool {
assert!(!control_.is_null());
assert!(!changes_.is_null());
assert!(!data.is_null());
let control = (control_ as *mut PropagateControl).as_mut().unwrap();
let changes = std::slice::from_raw_parts(changes_ as *const Literal, size);
let propagator = (data as *mut T).as_mut().unwrap();
Self::undo(control, changes, propagator)
}
// C trampoline for `check`.
#[doc(hidden)]
unsafe extern "C" fn unsafe_check(
control_: *mut clingo_propagate_control_t,
data: *mut ::std::os::raw::c_void,
) -> bool {
assert!(!control_.is_null());
assert!(!data.is_null());
let control = (control_ as *mut PropagateControl).as_mut().unwrap();
let propagator = (data as *mut T).as_mut().unwrap();
Self::check(control, propagator)
}
}
/// Owning handle to a clingo control object (grounder + solver state).
/// Freed via `clingo_control_free` in `Drop`.
#[derive(Debug)]
pub struct Control {
// non-null owning pointer to the underlying clingo control object
ctl: Unique<clingo_control_t>,
}
impl Drop for Control {
    fn drop(&mut self) {
        // Return the underlying control object to the C library.
        unsafe { clingo_control_free(self.ctl.as_ptr()) }
    }
}
impl Control {
    /// Create a new control object.
    ///
    /// **Note:** Only gringo options (without `--output`) and clasp`s options are supported as
    /// arguments, except basic options such as `--help`.
    /// Furthermore, a control object is blocked while a search call is active;
    /// you must not call any member function during search.
    ///
    /// Messages are printed to stderr.
    ///
    /// # Arguments
    ///
    /// * `arguments` - string array of command line arguments
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if argument parsing fails
    pub fn new(
        arguments: std::vec::Vec<String>,
        message_limit: u32,
    ) -> Result<Control, &'static str> {
        let logger = None;
        let logger_data = std::ptr::null_mut();
        // create a vector of zero terminated strings
        let mut args: Vec<CString> = Vec::new();
        for arg in arguments {
            args.push(CString::new(arg).unwrap());
        }
        // convert the strings to raw pointers
        let c_args = args.iter()
            .map(|arg| arg.as_ptr())
            .collect::<Vec<*const c_char>>();
        // `mem::uninitialized()` on a raw pointer is undefined behavior (and
        // deprecated); start from null and let clingo_control_new fill it in.
        let mut ctl = std::ptr::null_mut();
        if unsafe {
            clingo_control_new(
                c_args.as_ptr(),
                c_args.len(),
                logger,
                logger_data,
                message_limit,
                &mut ctl,
            )
        } {
            Ok(Control {
                ctl: Unique::new(ctl).unwrap(),
            })
        } else {
            Err(error_message())
        }
    }
    /// Create a new control object.
    ///
    /// **Note:** Only gringo options (without <code>\-\-output</code>) and clasp`s options are supported as arguments,
    /// except basic options such as <code>\-\-help</code>.
    /// Furthermore, a control object is blocked while a search call is active;
    /// you must not call any member function during search.
    ///
    /// # Arguments
    ///
    /// * `arguments` - C string array of command line arguments
    /// * `logger` - callback functions for warnings and info messages
    /// * `logger_data` - user data for the logger callback
    /// * `message_limit` - maximum number of times the logger callback is called
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if argument parsing fails
    pub fn new_with_logger<D, T: Logger<D>>(
        arguments: std::vec::Vec<String>,
        _logger: &T,
        logger_data: &mut D,
        message_limit: u32,
    ) -> Result<Control, &'static str> {
        // create a vector of zero terminated strings
        let mut args: Vec<CString> = Vec::new();
        for arg in arguments {
            args.push(CString::new(arg).unwrap());
        }
        // convert the strings to raw pointers
        let c_args = args.iter()
            .map(|arg| arg.as_ptr())
            .collect::<Vec<*const c_char>>();
        // See `new()`: null instead of `mem::uninitialized()` (UB for pointers).
        let mut ctl = std::ptr::null_mut();
        let data = logger_data as *mut D;
        if unsafe {
            clingo_control_new(
                c_args.as_ptr(),
                c_args.len(),
                Some(T::unsafe_logging_callback as LoggingCallback),
                data as *mut ::std::os::raw::c_void,
                message_limit,
                &mut ctl,
            )
        } {
            Ok(Control {
                ctl: Unique::new(ctl).unwrap(),
            })
        } else {
            Err(error_message())
        }
    }
    //TODO pub fn clingo_control_load(control: *mut Control, file: *const c_char) -> bool;
    /// Extend the logic program with the given non-ground logic program in string form.
    ///
    /// This function puts the given program into a block of form: `#program name(parameters).`
    ///
    /// After extending the logic program, the corresponding program parts are typically grounded
    /// with `ground()`.
    ///
    /// # Arguments
    ///
    /// * `name` name of the program block
    /// * `parameters` string array of parameters of the program block
    /// * `program` string representation of the program
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if parsing fails
    pub fn add(
        &mut self,
        name_: &str,
        parameters: Vec<&str>,
        program_: &str,
    ) -> Result<(), &'static str> {
        let name = CString::new(name_).unwrap();
        let name_ptr = name.as_ptr();
        let program = CString::new(program_).unwrap();
        let program_ptr = program.as_ptr();
        let parameters_size = parameters.len();
        // create a vector of zero terminated strings
        let l_parameters = parameters
            .into_iter()
            .map(|arg| CString::new(arg).unwrap())
            .collect::<Vec<CString>>();
        // convert the strings to raw pointers
        let c_parameters = l_parameters
            .iter()
            .map(|arg| arg.as_ptr())
            .collect::<Vec<*const c_char>>();
        if unsafe {
            clingo_control_add(
                self.ctl.as_ptr(),
                name_ptr,
                c_parameters.as_ptr(),
                parameters_size,
                program_ptr,
            )
        } {
            Ok(())
        } else {
            Err(error_message())
        }
    }
    /// Ground the selected [`Part`](struct.Part.html) parts of the current (non-ground) logic program.
    ///
    /// After grounding, logic programs can be solved with `solve()`.
    ///
    /// **Note:** Parts of a logic program without an explicit <tt>\#program</tt>
    /// specification are by default put into a program called `base` - without
    /// arguments.
    ///
    /// # Arguments
    ///
    /// * `parts` array of parts to ground
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    ///
    /// **See:** [`Part`](struct.Part.html)
    pub fn ground(&mut self, sparts: &[Part]) -> Result<(), &'static str> {
        let parts = sparts
            .iter()
            .map(|arg| arg.from())
            .collect::<Vec<clingo_part>>();
        let parts_size = sparts.len();
        if unsafe {
            clingo_control_ground(
                self.ctl.as_ptr(),
                parts.as_ptr(),
                parts_size,
                None,
                std::ptr::null_mut() as *mut ::std::os::raw::c_void,
            )
        } {
            Ok(())
        } else {
            Err(error_message())
        }
    }
    /// Ground the selected [`Part`](struct.Part.html) parts of the current (non-ground) logic
    /// program.
    ///
    /// After grounding, logic programs can be solved with `solve()`.
    ///
    /// **Note:** Parts of a logic program without an explicit <tt>\#program</tt>
    /// specification are by default put into a program called `base` - without
    /// arguments.
    ///
    /// # Arguments
    ///
    /// * `parts` array of parts to ground
    /// * `ground_callback` callback to implement external functions
    /// * `ground_callback_data` user data for ground_callback
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    /// - error code of ground callback
    ///
    /// **See:** [`Part`](struct.Part.html)
    pub fn ground_with_event_handler<D, T: GroundEventHandler<D>>(
        &mut self,
        sparts: &[Part],
        _ground_callback: &T,
        ground_callback_data: &mut D,
    ) -> Result<(), &'static str> {
        let parts = sparts
            .iter()
            .map(|arg| arg.from())
            .collect::<Vec<clingo_part>>();
        let parts_size = sparts.len();
        let data = ground_callback_data as *mut D;
        if unsafe {
            clingo_control_ground(
                self.ctl.as_ptr(),
                parts.as_ptr(),
                parts_size,
                Some(T::unsafe_ground_callback as GroundCallback),
                data as *mut ::std::os::raw::c_void,
            )
        } {
            Ok(())
        } else {
            Err(error_message())
        }
    }
    /// Solve the currently [`ground()`](struct.Control.html#method.ground) grounded logic program enumerating its models.
    ///
    /// See the [`SolveHandle`](struct.SolveHandle.html) module for more information.
    ///
    /// # Arguments
    ///
    /// * `mode` - configures the search mode
    /// * `assumptions` - array of assumptions to solve under
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if solving could not be started
    pub fn solve(
        &mut self,
        mode: SolveMode,
        assumptions: &[SymbolicLiteral],
    ) -> Result<&mut SolveHandle, &'static str> {
        let mut handle = std::ptr::null_mut() as *mut clingo_solve_handle_t;
        if unsafe {
            clingo_control_solve(
                self.ctl.as_ptr(),
                mode as clingo_solve_mode_bitset_t,
                assumptions.as_ptr() as *const clingo_symbolic_literal_t,
                assumptions.len(),
                None,
                std::ptr::null_mut() as *mut ::std::os::raw::c_void,
                &mut handle,
            )
        } {
            unsafe { (handle as *mut SolveHandle).as_mut() }
                .ok_or("Rust binding failed to dereference pointer to clingo solve handle")
        } else {
            Err(error_message())
        }
    }
    /// Solve the currently [`ground()`](struct.Control.html#method.ground) grounded logic program
    /// enumerating its models.
    ///
    /// See the [`SolveHandle`](struct.SolveHandle.html) module for more information.
    ///
    /// # Arguments
    ///
    /// * `mode` - configures the search mode
    /// * `assumptions` - array of assumptions to solve under
    /// * `notify` - the event handler to register
    /// * `data` - the user data for the event handler
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if solving could not be started
    pub fn solve_with_event_handler<D, T: SolveEventHandler<D>>(
        &mut self,
        mode: clingo_solve_mode_bitset_t,
        assumptions: &[SymbolicLiteral],
        _notify: &T,
        data_: &mut D,
    ) -> Result<&mut SolveHandle, &'static str> {
        let mut handle = std::ptr::null_mut() as *mut clingo_solve_handle_t;
        let data = data_ as *mut D;
        if unsafe {
            clingo_control_solve(
                self.ctl.as_ptr(),
                mode,
                assumptions.as_ptr() as *const clingo_symbolic_literal_t,
                assumptions.len(),
                Some(T::unsafe_solve_callback as SolveEventCallback),
                data as *mut ::std::os::raw::c_void,
                &mut handle,
            )
        } {
            unsafe { (handle as *mut SolveHandle).as_mut() }
                .ok_or("Rust binding failed to dereference pointer to clingo solve handle")
        } else {
            Err(error_message())
        }
    }
    /// Clean up the domains of clingo`s grounding component using the solving
    /// component`s top level assignment.
    ///
    /// This function removes atoms from domains that are false and marks atoms as
    /// facts that are true. With multi-shot solving, this can result in smaller
    /// groundings because less rules have to be instantiated and more
    /// simplifications can be applied.
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    pub fn cleanup(&mut self) -> Result<(), &'static str> {
        if unsafe { clingo_control_cleanup(self.ctl.as_ptr()) } {
            Ok(())
        } else {
            Err(error_message())
        }
    }
    /// Assign a truth value to an external atom.
    ///
    /// If the atom does not exist or is not external, this is a noop.
    ///
    /// # Arguments
    ///
    /// * `atom` atom to assign
    /// * `value` - the truth value
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    pub fn assign_external(
        &mut self,
        symbol: &Symbol,
        value: TruthValue,
    ) -> Result<(), &'static str> {
        if unsafe {
            clingo_control_assign_external(
                self.ctl.as_ptr(),
                symbol.0,
                value as clingo_truth_value_t,
            )
        } {
            Ok(())
        } else {
            Err(error_message())
        }
    }
    /// Release an external atom.
    ///
    /// After this call, an external atom is no longer external and subject to
    /// program simplifications. If the atom does not exist or is not external,
    /// this is a noop.
    ///
    /// # Arguments
    ///
    /// * `atom` - atom to release
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    // pub fn clingo_control_release_external(control: *mut Control,
    //                                        atom: clingo_symbol_t)
    //                                        -> u8;
    /// Register a custom propagator with the control object.
    ///
    /// If the sequential flag is set to true, the propagator is called
    /// sequentially when solving with multiple threads.
    ///
    /// See the [`Propagator`](struct.Propagator) module for more information.
    ///
    /// # Arguments
    ///
    /// * `propagator` - the propagator
    /// * `data` user data passed to the propagator functions
    /// * `sequential` - whether the propagator should be called sequentially
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    pub fn register_propagator<D, T: PropagatorBuilder<D>>(
        &mut self,
        _propagator_builder: &T,
        data: &mut D,
        sequential: bool,
    ) -> Result<(), &'static str> {
        let propagator = T::new();
        // NOTE(review): `propagator` is a stack local dropped at the end of
        // this function; this assumes clingo copies the callback table during
        // registration — verify against the clingo C API docs.
        let propagator_ptr: *const Propagator = &propagator;
        let data_ptr = data as *mut D;
        if unsafe {
            clingo_control_register_propagator(
                self.ctl.as_ptr(),
                propagator_ptr as *const clingo_propagator,
                data_ptr as *mut ::std::os::raw::c_void,
                sequential,
            )
        } {
            Ok(())
        } else {
            Err(error_message())
        }
    }
    /// Get a statistics object to inspect solver statistics.
    ///
    /// Statistics are updated after a solve call.
    ///
    /// See the [`Statistics`](struct.Statistics.html) module for more information.
    ///
    /// **Attention:**
    /// The level of detail of the statistics depends on the stats option
    /// (which can be set using [`Configuration`](struct.Configuration.html) module or passed as an
    /// option when [`new()`](struct.Control.html#method.new) creating the control object).
    /// The default level zero only provides basic statistics,
    /// level one provides extended and accumulated statistics,
    /// and level two provides per-thread statistics.
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    pub fn statistics(&mut self) -> Result<&mut Statistics, &'static str> {
        let mut stat = std::ptr::null_mut() as *mut clingo_statistics_t;
        if unsafe { clingo_control_statistics(self.ctl.as_ptr(), &mut stat) } {
            unsafe { (stat as *mut Statistics).as_mut() }
                .ok_or("Rust bindings failed to dereference pointer to clingo statistics")
        } else {
            Err(error_message())
        }
    }
    /// Interrupt the active solve call (or the following solve call right at the beginning).
    pub fn interrupt(&mut self) {
        unsafe {
            clingo_control_interrupt(self.ctl.as_ptr());
        }
    }
    /// Get a configuration object to change the solver configuration.
    ///
    /// See the [`Configuration`](struct.Configuration.html) module for more information.
    pub fn configuration(&mut self) -> Option<&mut Configuration> {
        let mut conf = std::ptr::null_mut() as *mut clingo_configuration_t;
        if unsafe { clingo_control_configuration(self.ctl.as_ptr(), &mut conf) } {
            unsafe { (conf as *mut Configuration).as_mut() }
        } else {
            None
        }
    }
    /// Configure how learnt constraints are handled during enumeration.
    ///
    /// If the enumeration assumption is enabled, then all information learnt from
    /// the solver`s various enumeration modes is removed after a solve call. This
    /// includes enumeration of cautious or brave consequences, enumeration of
    /// answer sets with or without projection, or finding optimal models, as well
    /// as clauses added with clingo_solve_control_add_clause().
    ///
    /// **Attention:** For practical purposes, this option is only interesting for single-shot solving
    /// or before the last solve call to squeeze out a tiny bit of performance.
    /// Initially, the enumeration assumption is enabled.
    ///
    /// # Arguments
    ///
    /// * `enable` - whether to enable the assumption
    pub fn use_enumeration_assumption(&mut self, enable: bool) -> Option<()> {
        if unsafe { clingo_control_use_enumeration_assumption(self.ctl.as_ptr(), enable) } {
            Some(())
        } else {
            None
        }
    }
    /// Return the symbol for a constant definition of form: <tt>\#const name = symbol</tt>.
    ///
    /// # Arguments
    ///
    /// * `name` - the name of the constant
    pub fn get_const(&mut self, name: &str) -> Option<Symbol> {
        let c_str_name = CString::new(name).unwrap();
        let mut symbol = 0 as clingo_symbol_t;
        if unsafe { clingo_control_get_const(self.ctl.as_ptr(), c_str_name.as_ptr(), &mut symbol) }
        {
            Some(Symbol(symbol))
        } else {
            None
        }
    }
    /// Check if there is a constant definition for the given constant.
    ///
    /// # Arguments
    ///
    /// * `name` - the name of the constant
    ///
    /// # Errors
    ///
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if constant definition does not exist
    ///
    /// **See:** [`Part::get_const()`](struct.Part.html#method.get_const)
    pub fn has_const(&mut self, name: &str) -> Result<bool, &'static str> {
        let c_str_name = CString::new(name).unwrap();
        let mut exist = false;
        if unsafe { clingo_control_has_const(self.ctl.as_ptr(), c_str_name.as_ptr(), &mut exist) } {
            Ok(exist)
        } else {
            Err(error_message())
        }
    }
    /// Get an object to inspect symbolic atoms (the relevant Herbrand base) used
    /// for grounding.
    ///
    /// See the [`SymbolicAtoms`](struct.SymbolicAtoms.html) module for more information.
    pub fn symbolic_atoms(&mut self) -> Option<&mut SymbolicAtoms> {
        let mut atoms = std::ptr::null_mut() as *mut clingo_symbolic_atoms_t;
        if unsafe { clingo_control_symbolic_atoms(self.ctl.as_ptr(), &mut atoms) } {
            unsafe { (atoms as *mut SymbolicAtoms).as_mut() }
        } else {
            None
        }
    }
    /// Get an object to inspect theory atoms that occur in the grounding.
    ///
    /// See the [`TheoryAtoms`](struct.TheoryAtoms.html) module for more information.
    pub fn theory_atoms(&mut self) -> Option<&mut TheoryAtoms> {
        let mut atoms = std::ptr::null_mut() as *mut clingo_theory_atoms_t;
        if unsafe { clingo_control_theory_atoms(self.ctl.as_ptr(), &mut atoms) } {
            unsafe { (atoms as *mut TheoryAtoms).as_mut() }
        } else {
            None
        }
    }
    /// Get an object to add ground directives to the program.
    ///
    /// See the [`ProgramBuilder`](struct.ProgramBuilder.html) module for more information.
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    pub fn backend(&mut self) -> Option<&mut Backend> {
        let mut backend = std::ptr::null_mut();
        if unsafe { clingo_control_backend(self.ctl.as_ptr(), &mut backend) } {
            unsafe { (backend as *mut Backend).as_mut() }
        } else {
            None
        }
    }
    /// Get an object to add non-ground directives to the program.
    ///
    /// See the [`ProgramBuilder`](struct.ProgramBuilder.html) module for more information.
    pub fn program_builder(&mut self) -> Option<&mut ProgramBuilder> {
        let mut builder = std::ptr::null_mut() as *mut clingo_program_builder_t;
        if unsafe { clingo_control_program_builder(self.ctl.as_ptr(), &mut builder) } {
            unsafe { (builder as *mut ProgramBuilder).as_mut() }
        } else {
            None
        }
    }
}
/// Wrapper over the raw clingo program builder; obtained via
/// `Control::program_builder()` by pointer cast.
pub struct ProgramBuilder(clingo_program_builder_t);
impl ProgramBuilder {
    /// Begin building a program.
    ///
    /// Returns `None` if the underlying clingo call fails.
    pub fn begin(&mut self) -> Option<()> {
        let ok = unsafe { clingo_program_builder_begin(&mut self.0) };
        if ok {
            Some(())
        } else {
            None
        }
    }
    /// Adds a statement to the program.
    ///
    /// **Attention:** [`begin()`](struct.ProgramBuilder.html#method.begin) must be called before
    /// adding statements and [`end()`](struct.ProgramBuilder.html#method.end) must be called after
    /// all statements have been added.
    ///
    /// # Arguments
    ///
    /// * `statement` - the statement to add
    ///
    /// # Errors
    ///
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) for statements of invalid form
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    pub fn add(&mut self, statement: &AstStatement) -> Result<(), &'static str> {
        let raw_stm = &statement.0;
        if unsafe { clingo_program_builder_add(&mut self.0, raw_stm) } {
            Ok(())
        } else {
            Err(error_message())
        }
    }
    /// End building a program.
    ///
    /// Returns `None` if the underlying clingo call fails.
    pub fn end(&mut self) -> Option<()> {
        let ok = unsafe { clingo_program_builder_end(&mut self.0) };
        if ok {
            Some(())
        } else {
            None
        }
    }
}
/// Newtype over the raw clingo AST head literal.
#[derive(Clone, Copy)]
pub struct AstHeadLiteral(clingo_ast_head_literal_t);
/// Newtype over the raw clingo AST body literal.
#[derive(Clone, Copy)]
pub struct AstBodyLiteral(clingo_ast_body_literal_t);
impl AstBodyLiteral {
/// Build a body literal of the given sign and type wrapping `lit_ref`.
///
/// NOTE(review): the raw struct stores a pointer to `lit_ref` through the
/// bindgen union; the caller must keep the `AstLiteral` alive for as long
/// as this body literal is used — the lifetime is not tracked here.
pub fn new(
Location(location): Location,
sign: AstSign,
type_: AstBodyLiteralType,
lit_ref: &AstLiteral,
) -> AstBodyLiteral {
// Fill the bindgen-generated union with the literal variant.
let _bg_union_2 = clingo_ast_body_literal__bindgen_ty_1 {
literal: (lit_ref as *const AstLiteral) as *const clingo_ast_literal,
};
AstBodyLiteral(clingo_ast_body_literal_t {
location: location,
sign: sign as clingo_ast_sign_t,
type_: type_ as clingo_ast_body_literal_type_t,
__bindgen_anon_1: _bg_union_2,
})
}
}
/// Newtype over the raw clingo AST rule (head plus body literals).
#[derive(Clone, Copy)]
pub struct AstRule(clingo_ast_rule_t);
impl AstRule {
    /// Build a rule from a head literal and a slice of body literals.
    ///
    /// The raw struct stores a pointer into `body`, so the slice must outlive
    /// the returned `AstRule`.
    pub fn new(AstHeadLiteral(head): AstHeadLiteral, body: &[AstBodyLiteral]) -> AstRule {
        AstRule(clingo_ast_rule {
            head: head,
            body: body.as_ptr() as *const clingo_ast_body_literal_t,
            size: body.len(),
        })
    }
    /// The rule head.
    pub fn head(&self) -> AstHeadLiteral {
        AstHeadLiteral(self.0.head)
    }
    /// The rule body as a borrowed slice of body literals.
    pub fn body(&self) -> &[AstBodyLiteral] {
        unsafe { std::slice::from_raw_parts(self.0.body as *const AstBodyLiteral, self.0.size) }
    }
    /// Number of body literals.
    pub fn size(&self) -> usize {
        self.0.size
    }
}
/// Newtype over the raw clingo AST `#external` directive.
#[derive(Clone, Copy)]
pub struct AstExternal(clingo_ast_external_t);
impl AstExternal {
    /// Build an `#external` directive from an atom term and a body.
    ///
    /// The raw struct stores a pointer into `body`, so the slice must outlive
    /// the returned `AstExternal`.
    pub fn new(AstTerm(term): AstTerm, body: &[AstBodyLiteral]) -> AstExternal {
        AstExternal(clingo_ast_external {
            atom: term,
            body: body.as_ptr() as *const clingo_ast_body_literal_t,
            size: body.len(),
        })
    }
}
/// Newtype over the raw clingo AST statement (rule, external, directive, ...).
#[derive(Clone)]
pub struct AstStatement(clingo_ast_statement_t);
impl AstStatement {
/// Create a statement wrapping an `#external` directive.
///
/// `type_` is stored as-is; for an external it should normally be
/// `AstStatementType::External`.
///
/// NOTE(review): the raw statement stores a pointer to `ext`; the caller
/// must keep the `AstExternal` alive while the statement is in use — the
/// lifetime is not tracked here.
pub fn new_external(
Location(location): Location,
type_: AstStatementType,
ext: &AstExternal,
) -> AstStatement {
// Store the external payload through the bindgen-generated union field.
let external: *const AstExternal = ext;
let _bg_union_2 = clingo_ast_statement__bindgen_ty_1 {
external: external as *const clingo_ast_external,
};
let stm = clingo_ast_statement_t {
location: location,
type_: type_ as clingo_ast_statement_type_t,
__bindgen_anon_1: _bg_union_2,
};
AstStatement(stm)
}
pub fn new_rule(Location(location): Location, rule_: &AstRule) -> AstStatement {
let rule: *const AstRule = rule_;
let _bg_union_2 = clingo_ast_statement__bindgen_ty_1 {
rule: rule as *const clingo_ast_rule,
};
let stm = clingo_ast_statement_t {
location: location,
type_: AstStatementType::Rule as clingo_ast_statement_type_t,
__bindgen_anon_1: _bg_union_2,
};
AstStatement(stm)
}
pub fn location(&self) -> Location {
Location(self.0.location)
}
pub fn get_type(&self) -> Result<AstStatementType, &'static str> {
let AstStatement(ref stm) = *self;
match stm.type_ as u32 {
clingo_ast_statement_type_clingo_ast_statement_type_rule => Ok(AstStatementType::Rule),
clingo_ast_statement_type_clingo_ast_statement_type_const => {
Ok(AstStatementType::Const)
}
clingo_ast_statement_type_clingo_ast_statement_type_show_signature => {
Ok(AstStatementType::ShowSignature)
}
clingo_ast_statement_type_clingo_ast_statement_type_show_term => {
Ok(AstStatementType::ShowTerm)
}
clingo_ast_statement_type_clingo_ast_statement_type_minimize => {
Ok(AstStatementType::Minimize)
}
clingo_ast_statement_type_clingo_ast_statement_type_script => {
Ok(AstStatementType::Script)
}
clingo_ast_statement_type_clingo_ast_statement_type_program => {
Ok(AstStatementType::Program)
}
clingo_ast_statement_type_clingo_ast_statement_type_external => {
Ok(AstStatementType::External)
}
clingo_ast_statement_type_clingo_ast_statement_type_edge => Ok(AstStatementType::Edge),
clingo_ast_statement_type_clingo_ast_statement_type_heuristic => {
Ok(AstStatementType::Heuristic)
}
clingo_ast_statement_type_clingo_ast_statement_type_project_atom => {
Ok(AstStatementType::ProjectAtom)
}
clingo_ast_statement_type_clingo_ast_statement_type_project_atom_signature => {
Ok(AstStatementType::ProjectAtomSignature)
}
clingo_ast_statement_type_clingo_ast_statement_type_theory_definition => {
Ok(AstStatementType::TheoryDefinition)
}
_ => Err("Rust binding failed to match clingo ast statement type"),
}
}
pub unsafe fn rule(&self) -> &AstRule {
let AstStatement(ref stm) = *self;
let ast_rule_ptr = stm.__bindgen_anon_1.rule as *const clingo_ast_rule_t;
(ast_rule_ptr as *const AstRule).as_ref().unwrap()
}
}
/// Thin wrapper around the raw bindgen `clingo_ast_term_t` value.
#[derive(Clone, Copy)]
pub struct AstTerm(clingo_ast_term_t);
impl AstTerm {
    /// Build a term that holds a plain symbol.
    pub fn new_symbol(Location(location): Location, Symbol(symbol): Symbol) -> AstTerm {
        AstTerm(clingo_ast_term_t {
            location,
            type_: clingo_ast_term_type_clingo_ast_term_type_symbol as clingo_ast_term_type_t,
            __bindgen_anon_1: clingo_ast_term__bindgen_ty_1 { symbol },
        })
    }
    /// Source location of the term.
    pub fn location(&self) -> Location {
        Location(self.0.location)
    }
}
/// Thin wrapper around the raw bindgen `clingo_ast_literal_t` value.
#[derive(Clone, Copy)]
pub struct AstLiteral(clingo_ast_literal_t);
impl AstLiteral {
    /// Build a literal wrapping a symbolic term.
    ///
    /// Only a raw pointer to `sym` is stored inside the bindgen union, so
    /// the referenced `AstTerm` must outlive the returned literal.
    pub fn new(
        Location(location): Location,
        sign: AstSign,
        type_: AstLiteralType,
        sym: &AstTerm,
    ) -> AstLiteral {
        let term_ptr = sym as *const AstTerm as *const clingo_sys::clingo_ast_term;
        AstLiteral(clingo_ast_literal_t {
            location,
            type_: type_ as clingo_ast_literal_type_t,
            sign: sign as clingo_ast_sign_t,
            __bindgen_anon_1: clingo_ast_literal__bindgen_ty_1 { symbol: term_ptr },
        })
    }
}
/// Handle to a clingo configuration object — a tree of value, array and map
/// entries addressed by `Id` keys.
pub struct Configuration(clingo_configuration_t);
impl Configuration {
/// Get the root key of the configuration.
pub fn root(&mut self) -> Option<Id> {
let mut root_key = 0 as clingo_id_t;
if unsafe { clingo_configuration_root(&mut self.0, &mut root_key) } {
Some(Id(root_key))
} else {
None
}
}
/// Get the type of a key.
// TODO: The type is bitset, an entry can have multiple (but at least one) type.
pub fn configuration_type(&mut self, Id(key): Id) -> Result<ConfigurationType, &'static str> {
let mut ctype = 0 as clingo_configuration_type_bitset_t;
if unsafe { clingo_configuration_type(&mut self.0, key, &mut ctype) } {
match ctype as u32 {
clingo_configuration_type_clingo_configuration_type_value => {
Ok(ConfigurationType::Value)
}
clingo_configuration_type_clingo_configuration_type_array => {
Ok(ConfigurationType::Array)
}
clingo_configuration_type_clingo_configurations_type_map => {
Ok(ConfigurationType::Map)
}
_ => Err("Rust binding failed to match clingo configuration type"),
}
} else {
Err("Rust binding failed to detect clingo configuration type")
}
}
/// Get the description of an entry.
pub fn description(&mut self, Id(key): Id) -> Option<&str> {
let mut description_ptr = unsafe { mem::uninitialized() };
if unsafe {
clingo_configuration_description(
&mut self.0,
key,
&mut description_ptr as *mut *const c_char,
)
} {
let cstr = unsafe { CStr::from_ptr(description_ptr) };
Some(cstr.to_str().unwrap())
} else {
None
}
}
/// Get the size of an array entry.
///
/// # Pre-condition
///
/// The [`configuration_type()`](struct.Configuration.html#method.configuration_type) type of the entry must be [`ConfigurationType::Array`](enum.ConfigurationType.html#variant.Array).
pub fn array_size(&mut self, Id(key): Id) -> Option<usize> {
let mut size = 0;
if unsafe { clingo_configuration_array_size(&mut self.0, key, &mut size) } {
Some(size)
} else {
None
}
}
/// Get the subkey at the given offset of an array entry.
///
/// **Note:** Some array entries, like fore example the solver configuration, can be accessed past there actual size to add subentries.
/// # Pre-condition
///
/// The [`configuration_type()`](struct.Configuration.html#method.configuration_type) type of the entry must be [`ConfigurationType::Array`](enum.ConfigurationType.html#variant.Array).
///
/// # Arguments
///
/// * `key` - the key
/// * `offset` - the offset in the array
pub fn array_at(&mut self, Id(key): Id, offset: usize) -> Option<Id> {
let mut nkey = 0 as clingo_id_t;
if unsafe { clingo_configuration_array_at(&mut self.0, key, offset, &mut nkey) } {
Some(Id(nkey))
} else {
None
}
}
/// Get the number of subkeys of a map entry.
///
/// # Pre-condition
///
/// The [`configuration_type()`](struct.Configuration.html#method.configuration_type) type of the entry must be [`ConfigurationType::Map`](enum.ConfigurationType.html#variant.Map).
pub fn map_size(&mut self, Id(key): Id) -> Option<usize> {
let mut size = 0;
if unsafe { clingo_configuration_map_size(&mut self.0, key, &mut size) } {
Some(size)
} else {
None
}
}
/// Get the name associated with the offset-th subkey.
///
/// # Pre-condition
///
/// The [`configuration_type()`](struct.Configuration.html#method.configuration_type) type of the entry must be [`ConfigurationType::Map`](enum.ConfigurationType.html#variant.Map).
///
/// # Arguments:
///
/// * `key` - the key
/// * `offset` - the offset of the name
pub fn map_subkey_name(&mut self, Id(key): Id, offset: usize) -> Option<&str> {
let mut name_ptr = unsafe { mem::uninitialized() };
if unsafe {
clingo_configuration_map_subkey_name(
&mut self.0,
key,
offset,
&mut name_ptr as *mut *const c_char,
)
} {
let cstr = unsafe { CStr::from_ptr(name_ptr) };
Some(cstr.to_str().unwrap())
} else {
None
}
}
/// Lookup a subkey under the given name.
///
/// # Pre-condition
///
/// The [`configuration_type()`](struct.Configuration.html#method.configuration_type) type of the entry must be [`ConfigurationType::Map`](enum.ConfigurationType.html#variant.Map).
///
/// **Note:** Multiple levels can be looked up by concatenating keys with a period.
pub fn map_at(&mut self, Id(key): Id, name: &str) -> Option<Id> {
let mut nkey = 0 as clingo_id_t;
let name_c_str = CString::new(name).unwrap();
if unsafe { clingo_configuration_map_at(&mut self.0, key, name_c_str.as_ptr(), &mut nkey) }
{
Some(Id(nkey))
} else {
None
}
}
/// Check whether a entry has a value.
///
/// # Pre-condition
///
/// The [`configuration_type()`](struct.Configuration.html#method.configuration_type) type of the entry must be [`ConfigurationType::Value`](enum.ConfigurationType.html#variant.Value).
///
/// # Arguments:
///
/// * `key` - the key
pub fn value_is_assigned(&mut self, Id(key): Id) -> Option<bool> {
let mut assigned = false;
if unsafe { clingo_configuration_value_is_assigned(&mut self.0, key, &mut assigned) } {
Some(assigned)
} else {
None
}
}
//NOTTODO obsolete: clingo_configuration_value_get_size(&mut self.0, key, &mut size) }
/// Get the string value of the given entry.
///
/// # Pre-condition
///
/// - The [`configuration_type()`](struct.Configuration.html#method.configuration_type) type of the entry must be [`ConfigurationType::Value`](enum.ConfigurationType.html#variant.Value).
///
/// # Arguments:
///
/// * `key` - the key
pub fn value_get(&mut self, Id(key): Id) -> Option<&str> {
let mut size = 0;
if unsafe { clingo_configuration_value_get_size(&mut self.0, key, &mut size) } {
let mut value_ptr = unsafe { mem::uninitialized() };
if unsafe { clingo_configuration_value_get(&mut self.0, key, &mut value_ptr, size) } {
let cstr = unsafe { CStr::from_ptr(&value_ptr) };
Some(cstr.to_str().unwrap())
} else {
None
}
} else {
None
}
}
/// Set the value of an entry.
///
/// # Pre-condition
///
/// The [`configuration_type()`](struct.Configuration.html#method.configuration_type) type of the entry must be [`ConfigurationType::Value`](enum.ConfigurationType.html#variant.Value).
///
/// # Arguments
///
/// * `key` - the key
/// * `value` - the value to set
pub fn value_set(&mut self, Id(key): Id, value: &str) -> Option<()> {
let value_c_str = CString::new(value).unwrap();
if unsafe { clingo_configuration_value_set(&mut self.0, key, value_c_str.as_ptr()) } {
Some(())
} else {
None
}
}
}
/// Handle to a clingo backend for adding aspif directives (rules, external
/// statements, projections, assumptions) directly to a program.
pub struct Backend(clingo_backend_t);
impl Backend {
/// Add a rule to the program.
///
/// # Arguments
///
/// * `choice` determines if the head is a choice or a disjunction
/// * `head` - the head atoms
/// * `body` - the body literals
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn rule(&mut self, choice: bool, head: &[Atom], body: &[Literal]) -> Result<(), Error> {
if unsafe {
clingo_backend_rule(
&mut self.0,
choice,
head.as_ptr() as *const clingo_atom_t,
head.len(),
body.as_ptr() as *const clingo_literal_t,
body.len(),
)
} {
Ok(())
} else {
Err(error())
}
}
/// Add a weight rule to the program.
///
/// **Attention:** All weights and the lower bound must be positive.
///
/// # Arguments
/// * `choice` - determines if the head is a choice or a disjunction
/// * `head` - the head atoms
/// * `lower_bound` - the lower bound of the weight rule
/// * `body` - the weighted body literals
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn weight_rule(
&mut self,
choice: bool,
head: &[Atom],
lower_bound: i32,
body: &[WeightedLiteral],
) -> Result<(), Error> {
if unsafe {
clingo_backend_weight_rule(
&mut self.0,
choice,
head.as_ptr() as *const clingo_atom_t,
head.len(),
lower_bound,
body.as_ptr() as *const clingo_weighted_literal_t,
body.len(),
)
} {
Ok(())
} else {
Err(error())
}
}
/// Add a minimize constraint (or weak constraint) to the program.
///
/// # Arguments:
///
/// * `priority` - the priority of the constraint
/// * `literals` - the weighted literals whose sum to minimize
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn minimize(&mut self, priority: i32, literals: &[WeightedLiteral]) -> Result<(), Error> {
if unsafe {
clingo_backend_minimize(
&mut self.0,
priority,
literals.as_ptr() as *const clingo_weighted_literal_t,
literals.len(),
)
} {
Ok(())
} else {
Err(error())
}
}
/// Add a projection directive.
///
/// # Arguments:
///
/// * `atoms` - the atoms to project on
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn project(&mut self, atoms: &[Atom]) -> Result<(), Error> {
if unsafe {
clingo_backend_project(
&mut self.0,
atoms.as_ptr() as *const clingo_atom_t,
atoms.len(),
)
} {
Ok(())
} else {
Err(error())
}
}
/// Add an external statement.
///
/// # Arguments:
///
/// * `backend` - the target backend
/// * `atom` - the external atom
/// * `type` - the type of the external statement
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn external(&mut self, atom: &Atom, type_: ExternalType) -> Result<(), Error> {
if unsafe { clingo_backend_external(&mut self.0, atom.0, type_ as clingo_external_type_t) } {
Ok(())
} else {
Err(error())
}
}
/// Add an assumption directive.
///
/// # Arguments
///
/// * `literals` - the literals to assume (positive literals are true and negative literals
/// false for the next solve call)
///
/// # Errors
///
/// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
pub fn assume(&mut self, literals: &[Literal]) -> Result<(), &'static str> {
let size = literals.len();
if unsafe {
clingo_backend_assume(
&mut self.0,
literals.as_ptr() as *const clingo_literal_t,
size,
)
} {
Ok(())
} else {
Err(error_message())
}
}
//TODO pub fn clingo_backend_heuristic(backend: *mut Backend,
// atom: clingo_atom_t,
// type_: clingo_heuristic_type_t,
// bias: c_int,
// priority: ::std::os::raw::c_uint,
// condition: *const clingo_literal_t,
// size: size_t)
// -> u8;
//TODO pub fn clingo_backend_acyc_edge(backend: *mut Backend,
// node_u: c_int,
// node_v: c_int,
// condition: *const clingo_literal_t,
// size: size_t)
// -> u8;
/// Get a fresh atom to be used in aspif directives.
pub fn add_atom(&mut self) -> Option<Atom> {
let mut atom = 0 as clingo_atom_t;
if unsafe { clingo_backend_add_atom(&mut self.0, &mut atom) } {
Some(Atom(atom))
} else {
None
}
}
}
/// Handle to a clingo statistics object — a tree of value, array and map
/// entries addressed by `u64` keys.
pub struct Statistics(clingo_statistics_t);
impl Statistics {
    /// Get the root key of the statistics.
    pub fn root(&mut self) -> Option<u64> {
        let mut root_key: u64 = 0;
        let ok = unsafe { clingo_statistics_root(&mut self.0, &mut root_key) };
        if ok {
            Some(root_key)
        } else {
            None
        }
    }
    /// Get the type of a key.
    ///
    /// # Errors
    ///
    /// - Failure to match clingo statistics type
    pub fn statistics_type(&mut self, key: u64) -> Result<StatisticsType, &'static str> {
        let mut stype = 0 as clingo_statistics_type_t;
        if !unsafe { clingo_statistics_type(&mut self.0, key, &mut stype) } {
            return Err(error_message());
        }
        // The arms are bindgen-generated consts for clingo_statistics_type.
        match stype as u32 {
            clingo_statistics_type_clingo_statistics_type_empty => Ok(StatisticsType::Empty),
            clingo_statistics_type_clingo_statistics_type_value => Ok(StatisticsType::Value),
            clingo_statistics_type_clingo_statistics_type_array => Ok(StatisticsType::Array),
            clingo_statistics_type_clingo_statistics_type_map => Ok(StatisticsType::Map),
            _ => Err("Rust binding failed to match clingo statistics type"),
        }
    }
    /// Get the size of an array entry.
    ///
    /// # Pre-condition
    ///
    /// The [statistics type](struct.Statistics.html#method.statistics_type) of the entry must be
    /// [`StatisticsType::Array`](enum.StatisticsType.html#variant.Array).
    pub fn array_size(&mut self, key: u64) -> Option<usize> {
        let mut size: usize = 0;
        let ok = unsafe { clingo_statistics_array_size(&mut self.0, key, &mut size) };
        if ok {
            Some(size)
        } else {
            None
        }
    }
    /// Get the subkey at the given offset of an array entry.
    ///
    /// # Pre-condition
    ///
    /// The [statistics type](struct.Statistics.html#method.statistics_type) of the entry must be
    /// [`StatisticsType::Array`](enum.StatisticsType.html#variant.Array).
    ///
    /// # Arguments
    ///
    /// * `key` - the key
    /// * `offset` - the offset in the array
    pub fn statistics_array_at(&mut self, key: u64, offset: usize) -> Option<u64> {
        let mut subkey: u64 = 0;
        let ok = unsafe { clingo_statistics_array_at(&mut self.0, key, offset, &mut subkey) };
        if ok {
            Some(subkey)
        } else {
            None
        }
    }
    /// Get the number of subkeys of a map entry.
    ///
    /// # Pre-condition
    ///
    /// The [statistics type](struct.Statistics.html#method.statistics_type) of the entry must
    /// be [`StatisticsType::Map`](enum.StatisticsType.html#variant.Map).
    pub fn map_size(&mut self, key: u64) -> Option<usize> {
        let mut size: usize = 0;
        let ok = unsafe { clingo_statistics_map_size(&mut self.0, key, &mut size) };
        if ok {
            Some(size)
        } else {
            None
        }
    }
    /// Get the name associated with the offset-th subkey.
    ///
    /// # Pre-condition
    ///
    /// The [statistics type](struct.Statistics.html#method.statistics_type) of the entry must be
    /// [`StatisticsType::Map`](enum.StatisticsType.html#variant.Map).
    ///
    /// # Arguments
    ///
    /// * `key` - the key
    /// * `offset` - the offset of the name
    // NOTE(review): the returned `&'a str` carries an unconstrained lifetime
    // (laundered through CStr::from_ptr) — it can outlive the statistics
    // object; consider tying it to `&mut self`.
    pub fn map_subkey_name<'a>(&mut self, key: u64, offset: usize) -> Option<&'a str> {
        let mut name = std::ptr::null() as *const c_char;
        let ok = unsafe { clingo_statistics_map_subkey_name(&mut self.0, key, offset, &mut name) };
        if ok {
            Some(unsafe { CStr::from_ptr(name) }.to_str().unwrap())
        } else {
            None
        }
    }
    /// Lookup a subkey under the given name.
    ///
    /// # Pre-condition
    ///
    /// The [statistics type](struct.Statistics.html#method.statistics_type) of the entry must be
    /// [`StatisticsType::Map`](enum.StatisticsType.html#variant.Map).
    ///
    /// **Note:** Multiple levels can be looked up by concatenating keys with a period.
    ///
    /// # Arguments
    ///
    /// * `key` - the key
    /// * `name` - the name to lookup the subkey
    pub fn map_at(&mut self, key: u64, name: &str) -> Option<u64> {
        let mut subkey: u64 = 0;
        let name_c_str = CString::new(name).unwrap();
        let ok = unsafe {
            clingo_statistics_map_at(&mut self.0, key, name_c_str.as_ptr(), &mut subkey)
        };
        if ok {
            Some(subkey)
        } else {
            None
        }
    }
    /// Get the value of the given entry.
    ///
    /// # Pre-condition
    ///
    /// The [statistics type](struct.Statistics.html#method.statistics_type) of the entry must be
    /// [`StatisticsType::Value`](enum.StatisticsType.html#variant.Value).
    pub fn value_get(&mut self, key: u64) -> Option<f64> {
        let mut value: f64 = 0.0;
        let ok = unsafe { clingo_statistics_value_get(&mut self.0, key, &mut value) };
        if ok {
            Some(value)
        } else {
            None
        }
    }
}
/// Wrapper around a raw `clingo_signature_t` (name/arity/sign of a predicate).
pub struct Signature(clingo_signature_t);
impl Signature {
    /// Create a new signature.
    ///
    /// # Arguments
    ///
    /// * `name_` name of the signature
    /// * `arity` arity of the signature
    /// * `positive` false if the signature has a classical negation sign
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    pub fn create(name_: &str, arity: u32, positive: bool) -> Result<Signature, &'static str> {
        let name = CString::new(name_).unwrap();
        let mut sig = 0;
        let ok = unsafe { clingo_signature_create(name.as_ptr(), arity, positive, &mut sig) };
        if ok {
            Ok(Signature(sig))
        } else {
            Err(error_message())
        }
    }
}
/// Handle for inspecting the symbolic atoms of a program (the Herbrand base).
#[derive(Debug)]
pub struct SymbolicAtoms(clingo_symbolic_atoms_t);
impl SymbolicAtoms {
/// Get a forward iterator to the beginning of the sequence of all symbolic
/// atoms optionally restricted to a given signature.
///
/// # Arguments
///
/// * `signature` optional signature
pub fn begin(
&mut self,
opt_sig: Option<&Signature>,
) -> Option<clingo_symbolic_atom_iterator_t> {
match opt_sig {
Some(sig) => {
let mut iterator = 0 as clingo_symbolic_atom_iterator_t;
if unsafe { clingo_symbolic_atoms_begin(&mut self.0, &sig.0, &mut iterator) } {
Some(iterator)
} else {
None
}
}
None => {
let signature = std::ptr::null();
let mut iterator = 0 as clingo_symbolic_atom_iterator_t;
if unsafe { clingo_symbolic_atoms_begin(&mut self.0, signature, &mut iterator) } {
Some(iterator)
} else {
None
}
}
}
}
/// Iterator pointing to the end of the sequence of symbolic atoms.
pub fn end(&mut self) -> Option<clingo_symbolic_atom_iterator_t> {
let mut iterator = 0 as clingo_symbolic_atom_iterator_t;
if unsafe { clingo_symbolic_atoms_end(&mut self.0, &mut iterator) } {
Some(iterator)
} else {
None
}
}
/// Find a symbolic atom given its symbolic representation.
///
/// # Arguments
///
/// * `symbol` - the symbol to lookup
/// * `iterator` iterator pointing to the symbolic atom or to the end
/// of the sequence if no corresponding atom is found
pub fn find(&mut self, Symbol(symbol): Symbol) -> Option<clingo_symbolic_atom_iterator_t> {
let mut iterator = 0 as clingo_symbolic_atom_iterator_t;
if unsafe { clingo_symbolic_atoms_find(&mut self.0, symbol, &mut iterator) } {
Some(iterator)
} else {
None
}
}
/// Check if two iterators point to the same element (or end of the sequence).
///
/// # Arguments
///
/// * `a` - the first iterator
/// * `b` - the second iterator
pub fn iterator_is_equal_to(
&mut self,
a: clingo_symbolic_atom_iterator_t,
b: clingo_symbolic_atom_iterator_t,
) -> Option<bool> {
let mut equal = false;
if unsafe { clingo_symbolic_atoms_iterator_is_equal_to(&mut self.0, a, b, &mut equal) } {
Some(equal)
} else {
None
}
}
/// Get the symbolic representation of an atom.
///
/// # Arguments
///
/// * `iterator` iterator to the atom
pub fn symbol(&mut self, iterator: clingo_symbolic_atom_iterator_t) -> Option<Symbol> {
let mut symbol = 0 as clingo_symbol_t;
if unsafe { clingo_symbolic_atoms_symbol(&mut self.0, iterator, &mut symbol) } {
Some(Symbol(symbol))
} else {
None
}
}
/// Check whether an atom is a fact.
///
/// **Note:** This does not determine if an atom is a cautious consequence. The
/// grounding or solving component`s simplifications can only detect this in
/// some cases.
///
/// # Arguments
///
/// * `iterator` iterator to the atom
pub fn is_fact(&mut self, iterator: clingo_symbolic_atom_iterator_t) -> Option<bool> {
let mut fact = false;
if unsafe { clingo_symbolic_atoms_is_fact(&mut self.0, iterator, &mut fact) } {
Some(fact)
} else {
None
}
}
/// Check whether an atom is external.
///
/// An atom is external if it has been defined using an external directive and
/// has not been released or defined by a rule.
///
/// # Arguments
///
/// * `iterator` iterator to the atom
pub fn is_external(&mut self, iterator: clingo_symbolic_atom_iterator_t) -> Option<bool> {
let mut external = false;
if unsafe { clingo_symbolic_atoms_is_external(&mut self.0, iterator, &mut external) } {
Some(external)
} else {
None
}
}
/// Returns the (numeric) aspif literal corresponding to the given symbolic atom.
///
/// Such a literal can be mapped to a solver literal (see the \ref Propagator
/// module) or be used in rules in aspif format (see the \ref ProgramBuilder
/// module).
///
/// # Arguments
///
/// * `iterator` iterator to the atom
pub fn literal(&mut self, iterator: clingo_symbolic_atom_iterator_t) -> Option<Literal> {
let mut literal = 0 as clingo_literal_t;
if unsafe { clingo_symbolic_atoms_literal(&mut self.0, iterator, &mut literal) } {
Some(Literal(literal))
} else {
None
}
}
//TODO pub fn clingo_symbolic_atoms_signatures_size(atoms: *mut SymbolicAtoms,
// size: *mut size_t)
// -> u8;
//TODO pub fn clingo_symbolic_atoms_signatures(atoms: *mut SymbolicAtoms,
// signatures: *mut clingo_signature_t,
// size: size_t)
// -> u8;
/// Get an iterator to the next element in the sequence of symbolic atoms.
///
/// # Arguments
///
/// * `iterator` - the current iterator
pub fn next(
&mut self,
iterator: clingo_symbolic_atom_iterator_t,
) -> Option<clingo_symbolic_atom_iterator_t> {
let mut next = 0 as clingo_symbolic_atom_iterator_t;
if unsafe { clingo_symbolic_atoms_next(&mut self.0, iterator, &mut next) } {
Some(next)
} else {
None
}
}
//TODO pub fn clingo_symbolic_atoms_is_valid(atoms: *mut SymbolicAtoms,
// iterator: clingo_symbolic_atom_iterator_t,
// valid: *mut u8)
// -> u8;
}
/// Handle for inspecting the theory atoms, terms and elements of a program.
pub struct TheoryAtoms(clingo_theory_atoms_t);
impl TheoryAtoms {
    // TODO: wrap clingo_theory_atoms_term_type(term, type_: *mut clingo_theory_term_type_t)
    //       and clingo_theory_atoms_term_number(term, number: *mut c_int).
    /// Get the name of the given constant or function theory term.
    ///
    /// # Pre-condition
    ///
    /// The term must be of type ::clingo_theory_term_type_function or
    /// ::clingo_theory_term_type_symbol.
    ///
    /// # Arguments
    ///
    /// * `term` id of the term
    // NOTE(review): the returned `&'a str` carries an unconstrained lifetime
    // (laundered through CStr::from_ptr) — consider tying it to `&mut self`.
    pub fn term_name<'a>(&mut self, Id(term): Id) -> Option<&'a str> {
        let mut char_ptr = std::ptr::null() as *const c_char;
        let ok = unsafe { clingo_theory_atoms_term_name(&mut self.0, term, &mut char_ptr) };
        if ok {
            let c_str = unsafe { CStr::from_ptr(char_ptr) };
            Some(c_str.to_str().unwrap())
        } else {
            None
        }
    }
    // TODO: wrap clingo_theory_atoms_term_arguments(term, arguments: *mut *const clingo_id_t,
    //       size), clingo_theory_atoms_term_to_string_size(term, size) and
    //       clingo_theory_atoms_term_to_string(term, string: *mut c_char, size).
    // TODO: wrap clingo_theory_atoms_element_tuple(element, tuple, size),
    //       clingo_theory_atoms_element_condition(element, condition, size),
    //       clingo_theory_atoms_element_condition_id(element, condition),
    //       clingo_theory_atoms_element_to_string_size(element, size) and
    //       clingo_theory_atoms_element_to_string(element, string, size).
    /// Get the total number of theory atoms.
    pub fn size(&mut self) -> Option<usize> {
        let mut size: usize = 0;
        let ok = unsafe { clingo_theory_atoms_size(&mut self.0, &mut size) };
        if ok {
            Some(size)
        } else {
            None
        }
    }
    /// Get the theory term associated with the theory atom.
    ///
    /// # Arguments
    ///
    /// * `atom` id of the atom
    pub fn atom_term(&mut self, Id(atom): Id) -> Option<Id> {
        let mut term = 0 as clingo_id_t;
        let ok = unsafe { clingo_theory_atoms_atom_term(&mut self.0, atom, &mut term) };
        if ok {
            Some(Id(term))
        } else {
            None
        }
    }
    // TODO: wrap clingo_theory_atoms_atom_elements(atom, elements: *mut *const clingo_id_t,
    //       size).
    /// Whether the theory atom has a guard.
    ///
    /// # Arguments
    ///
    /// * `atom` id of the atom
    pub fn atom_has_guard(&mut self, Id(atom): Id) -> Option<bool> {
        let mut has_guard = false;
        let ok = unsafe { clingo_theory_atoms_atom_has_guard(&mut self.0, atom, &mut has_guard) };
        if ok {
            Some(has_guard)
        } else {
            None
        }
    }
    // TODO: wrap clingo_theory_atoms_atom_guard(atom, connective: *mut *const c_char,
    //       term: *mut clingo_id_t).
    /// Get the aspif literal associated with the given theory atom.
    ///
    /// # Arguments
    ///
    /// * `atom` id of the atom
    pub fn atom_literal(&mut self, Id(atom): Id) -> Option<Literal> {
        let mut literal = 0 as clingo_literal_t;
        let ok = unsafe { clingo_theory_atoms_atom_literal(&mut self.0, atom, &mut literal) };
        if ok {
            Some(Literal(literal))
        } else {
            None
        }
    }
    // TODO: wrap clingo_theory_atoms_atom_to_string_size(atom, size) and
    //       clingo_theory_atoms_atom_to_string(atom, string: *mut c_char, size).
}
/// Iterator over theory atom ids by plain counting, without validity checks
/// against the underlying `TheoryAtoms` object (hence "UNSAFE" in the name).
pub struct UNSAFE_TheoryAtomsIterator {
    // Current counter; pre-incremented before each yield, so the first
    // produced value is Id(1).
    count: usize,
    // Exclusive upper bound, taken from TheoryAtoms::size() at creation.
    size: usize,
}
impl Iterator for UNSAFE_TheoryAtomsIterator {
    type Item = Id;
    /// Yields `Id(count)` for successive counter values below `size`.
    // NOTE(review): because the counter is incremented *before* the bound
    // check with `<`, this yields Id(1)..=Id(size-1) — Id(0) is never
    // produced and only size-1 items are returned in total. If theory atom
    // ids are 0-based (0..size), this is an off-by-one; confirm against the
    // clingo theory-atoms API before changing.
    fn next(&mut self) -> Option<Id> {
        // increment our count. This is why we started at zero.
        self.count += 1;
        // check to see if we've finished counting or not.
        if self.count < self.size {
            Some(Id(self.count as u32))
        } else {
            None
        }
    }
}
impl UNSAFE_TheoryAtomsIterator {
    /// Create an iterator over the ids of `cta`'s theory atoms.
    ///
    /// Panics if querying the theory-atom count fails.
    pub fn from(cta: &mut TheoryAtoms) -> UNSAFE_TheoryAtomsIterator {
        let size = cta.size().unwrap();
        UNSAFE_TheoryAtomsIterator { count: 0, size }
    }
}
/// Handle to a model produced during solving.
pub struct Model(clingo_model_t);
impl Model {
    /// Get the type of the model.
    pub fn model_type(&mut self) -> Option<clingo_model_type_t> {
        let mut mtype = 0 as clingo_model_type_t;
        if unsafe { clingo_model_type(&mut self.0, &mut mtype) } {
            Some(mtype)
        } else {
            None
        }
    }
    /// Get the running number of the model.
    pub fn number(&mut self) -> Option<u64> {
        let mut number = 0;
        if unsafe { clingo_model_number(&mut self.0, &mut number) } {
            Some(number)
        } else {
            None
        }
    }
    //NOTTODO clingo_model_symbols_size is used internally by `symbols()`.
    /// Get the symbols of the selected types in the model.
    ///
    /// **Note:** CSP assignments are represented using functions with name "$"
    /// where the first argument is the name of the CSP variable and the second one its
    /// value.
    ///
    /// # Arguments
    ///
    /// * `show` - which symbols to select
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if the size is too small
    pub fn symbols(
        &mut self,
        show: clingo_show_type_bitset_t,
    ) -> Result<Vec<Symbol>, &'static str> {
        let mut size: usize = 0;
        if !unsafe { clingo_model_symbols_size(&mut self.0, show, &mut size) } {
            return Err(error_message());
        }
        // Fixed: the old code wrote through `Vec::as_ptr()` (a *const
        // pointer cast to *mut — UB to write through), never set the
        // vector's length, and then copied the data out with
        // from_raw_parts().to_owned(). Fill the buffer via as_mut_ptr(),
        // set the length, and return the vector directly.
        let mut symbols = Vec::<Symbol>::with_capacity(size);
        if unsafe {
            clingo_model_symbols(
                &mut self.0,
                show,
                symbols.as_mut_ptr() as *mut clingo_symbol_t,
                size,
            )
        } {
            // SAFETY: on success clingo_model_symbols has initialized
            // exactly `size` elements, and the capacity is at least `size`.
            unsafe { symbols.set_len(size) };
            Ok(symbols)
        } else {
            Err(error_message())
        }
    }
    // TODO: wrap clingo_model_contains(atom, contained: *mut bool),
    //       clingo_model_cost_size(size), clingo_model_cost(costs: *mut int64_t, size),
    //       clingo_model_optimality_proven(proven: *mut bool) and
    //       clingo_model_context(control: *mut *mut SolveControl).
}
/// Handle for influencing the search from within a model callback.
pub struct SolveControl(clingo_solve_control_t);
impl SolveControl {
    /// Add a clause that applies to the current solving step during model
    /// enumeration.
    ///
    /// **Note:** The [`Propagator`](enum.Propagator.html) module provides a more sophisticated
    /// interface to add clauses - even on partial assignments.
    ///
    /// # Arguments
    ///
    /// * `clause` array of literals representing the clause
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if adding the clause fails
    pub fn add_clause(&mut self, clause: &[Literal]) -> Result<(), &'static str> {
        let ok = unsafe {
            clingo_solve_control_add_clause(
                &mut self.0,
                clause.as_ptr() as *const clingo_literal_t,
                clause.len(),
            )
        };
        if ok {
            Ok(())
        } else {
            Err(error_message())
        }
    }
}
/// Handle passed to a propagator during propagation.
pub struct PropagateControl(clingo_propagate_control_t);
impl PropagateControl {
    /// Get the id of the underlying solver thread.
    ///
    /// Thread ids are consecutive numbers starting with zero.
    pub fn thread_id(&mut self) -> u32 {
        unsafe { clingo_propagate_control_thread_id(&mut self.0) }
    }
    // TODO: wrap clingo_propagate_control_assignment() -> *mut clingo_assignment_t.
    /// Add the given clause to the solver.
    ///
    /// This method sets its result to false if the current propagation must be stopped for the solver to backtrack.
    ///
    /// **Attention:** No further calls on the control object or functions on the assignment should be called when the result of this method is false.
    ///
    /// # Arguments
    ///
    /// * `clause` - the clause to add
    /// * `type_` - the clause type determining its lifetime
    ///
    /// **Returns** result indicating whether propagation has to be stopped
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    pub fn add_clause(
        &mut self,
        clause: &[Literal],
        type_: ClauseType,
    ) -> Result<bool, &'static str> {
        let mut keep_going = false;
        let ok = unsafe {
            clingo_propagate_control_add_clause(
                &mut self.0,
                clause.as_ptr() as *const clingo_literal_t,
                clause.len(),
                type_ as clingo_clause_type_t,
                &mut keep_going,
            )
        };
        if ok {
            Ok(keep_going)
        } else {
            Err(error_message())
        }
    }
    /// Propagate implied literals (resulting from added clauses).
    ///
    /// This method sets its result to false if the current propagation must be stopped for the
    /// solver to backtrack.
    ///
    /// **Attention:** No further calls on the control object or functions on the assignment should
    /// be called when the result of this method is false.
    ///
    /// **Returns** result indicating whether propagation has to be stopped
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    pub fn propagate(&mut self) -> Result<bool, &'static str> {
        let mut keep_going = false;
        let ok = unsafe { clingo_propagate_control_propagate(&mut self.0, &mut keep_going) };
        if ok {
            Ok(keep_going)
        } else {
            Err(error_message())
        }
    }
}
/// Wrapper around the raw clingo propagate-init object used while a
/// propagator is being initialized.
pub struct PropagateInit(clingo_propagate_init_t);
impl PropagateInit {
/// Map the given program literal or condition id to its solver literal.
///
/// # Arguments
///
/// * `aspif_literal` - the aspif literal to map
///
/// **Returns** the corresponding solver literal
pub fn solver_literal(&mut self, Literal(aspif_literal): Literal) -> Option<Literal> {
let mut solver_literal = 0 as clingo_literal_t;
if unsafe {
clingo_propagate_init_solver_literal(&mut self.0, aspif_literal, &mut solver_literal)
} {
Some(Literal(solver_literal))
} else {
None
}
}
/// Add a watch for the solver literal in the given phase.
///
/// # Arguments
///
/// * `solver_literal` - the solver literal
pub fn add_watch(&mut self, Literal(solver_literal): Literal) -> Option<()> {
if unsafe { clingo_propagate_init_add_watch(&mut self.0, solver_literal) } {
Some(())
} else {
None
}
}
/// Get an object to inspect the symbolic atoms.
pub fn symbolic_atoms<'a>(&mut self) -> Option<&'a mut SymbolicAtoms> {
let mut atoms_ptr = std::ptr::null_mut();
if unsafe { clingo_propagate_init_symbolic_atoms(&mut self.0, &mut atoms_ptr) } {
unsafe { (atoms_ptr as *mut SymbolicAtoms).as_mut() }
} else {
None
}
}
//TODO pub fn clingo_propagate_init_theory_atoms(init: &mut PropagateInit,
// atoms: *mut *mut TheoryAtoms)
// -> bool;
/// Get the number of threads used in subsequent solving.
/// **See:** [`PropagateControl::thread_id()`](struct.PropagateControl.html#method.thread_id)
pub fn number_of_threads(&mut self) -> usize {
(unsafe { clingo_propagate_init_number_of_threads(&mut self.0) } as usize)
}
}
/// Wrapper around the raw clingo handle to an active solve call.
pub struct SolveHandle(clingo_solve_handle);
impl SolveHandle {
    /// Get the next solve result.
    ///
    /// Blocks until the result is ready.
    /// When yielding, partial solve results can be obtained, i.e., when a
    /// model is ready the result will be satisfiable but neither the search
    /// exhausted nor the optimality proven.
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if solving fails
    pub fn get(&mut self) -> Result<clingo_solve_result_bitset_t, &'static str> {
        let mut bitset = 0;
        if !unsafe { clingo_solve_handle_get(&mut self.0, &mut bitset) } {
            return Err(error_message());
        }
        Ok(bitset)
    }
    /// Get the next model.
    ///
    /// clingo reports "no more models" with a null model pointer; that case
    /// surfaces here as the dereference error below.
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if solving fails
    pub fn model(&mut self) -> Result<&mut Model, &'static str> {
        let mut raw_model = std::ptr::null_mut() as *mut clingo_model_t;
        if !unsafe { clingo_solve_handle_model(&mut self.0, &mut raw_model) } {
            return Err(error_message());
        }
        unsafe { (raw_model as *mut Model).as_mut() }
            .ok_or("Rust binding failed to dereference pointer to clingo model")
    }
    /// Discards the last model and starts the search for the next one.
    ///
    /// If the search has been started asynchronously, this function continues
    /// the search in the background.
    ///
    /// **Note:** This function does not block.
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if solving fails
    pub fn resume(&mut self) -> Result<(), &'static str> {
        match unsafe { clingo_solve_handle_resume(&mut self.0) } {
            true => Ok(()),
            false => Err(error_message()),
        }
    }
    /// Stops the running search and releases the handle.
    ///
    /// Blocks until the search is stopped (as if an implicit cancel was called
    /// before the handle is released).
    ///
    /// # Errors
    ///
    /// - [`Error::BadAlloc`](enum.Error.html#variant.BadAlloc)
    /// - [`Error::Runtime`](enum.Error.html#variant.Runtime) if solving fails
    pub fn close(&mut self) -> Result<(), &'static str> {
        match unsafe { clingo_solve_handle_close(&mut self.0) } {
            true => Ok(()),
            false => Err(error_message()),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// These bindings target clingo 5.2.2; fail loudly when linked against
    /// any other version.
    #[test]
    fn version_test() {
        let (major, minor, revision) = version();
        // assert_eq! prints the actual values on failure, unlike assert!.
        assert_eq!(major, 5);
        assert_eq!(minor, 2);
        assert_eq!(revision, 2);
    }
    /// Round-trip a number symbol and check the infimum symbol's type.
    #[test]
    fn parse_program_test() {
        let mut sym = create_number(42);
        assert_eq!(sym.number().unwrap(), 42);
        sym = create_infimum();
        // `SymbolType` is only known to be PartialEq here, so keep assert!.
        assert!(SymbolType::Infimum == sym.get_type().unwrap());
    }
}
|
// (c) 2016 Joost Yervante Damad <joost@productize.be>
use std::fs::File;
use std::io::Read;
/// Crate-wide result alias: a success value or a human-readable error string.
pub type ERes<T> = Result<T, String>;

/// Build an `ERes` error from a message.
pub fn err<T>(msg: &str) -> ERes<T> {
    Err(msg.to_string())
}
/// Early-return with an error converted via `From`, mirroring what the
/// `Err` path of `try!` does.
///
/// The original expanded to `::std::error::FromError::from_error`, an API
/// that was removed before Rust 1.0 — any use of the macro would fail to
/// compile. `::std::convert::From::from` is the stable replacement.
macro_rules! fail {
    ($expr:expr) => (
        return Err(::std::convert::From::from($expr));
    )
}
fn read_file(name: &str) -> ERes<String> {
let mut f = try!(match File::open(name) {
Ok(f) => Ok(f),
Err(err) => Err(format!("open error in file {}: {}", name, err))
});
let mut s = String::new();
match f.read_to_string(&mut s) {
Ok(_) => Ok(s),
Err(err) => Err(format!("read error in file {}: {}", name, err))
}
}
pub mod footprint;
pub mod schematic;
slightly enhance error msg
// (c) 2016 Joost Yervante Damad <joost@productize.be>
use std::fs::File;
use std::io::Read;
/// Shorthand result type used across the crate: `Ok` payload or a `String`
/// error message.
pub type ERes<T> = Result<T, String>;

/// Wrap `msg` in an `ERes` error.
pub fn err<T>(msg: &str) -> ERes<T> {
    Err(msg.to_owned())
}
/// Early-return with an error converted via `From` (the `Err` half of the
/// old `try!` expansion).
///
/// `::std::error::FromError::from_error` from the original never shipped in
/// stable Rust (removed before 1.0), so expanding this macro could not
/// compile; `::std::convert::From::from` is the stable equivalent.
macro_rules! fail {
    ($expr:expr) => (
        return Err(::std::convert::From::from($expr));
    )
}
fn read_file(name: &str) -> ERes<String> {
let mut f = try!(match File::open(name) {
Ok(f) => Ok(f),
Err(err) => Err(format!("open error in file '{}': {}", name, err))
});
let mut s = String::new();
match f.read_to_string(&mut s) {
Ok(_) => Ok(s),
Err(err) => Err(format!("read error in file '{}': {}", name, err))
}
}
pub mod footprint;
pub mod schematic;
|
use super::ffi::bindgen;
use std::{mem, ptr};
use libc;
// Raw pointer to a libffi type descriptor.
type FfiType_ = *mut bindgen::ffi_type;
// Raw pointer to a null-terminated C array of type-descriptor pointers.
type FfiTypeArray_ = *mut FfiType_;
/// Owning wrapper around a libffi type descriptor; `Drop` frees descriptors
/// that were heap-allocated by this module (struct types only — see
/// `ffi_type_destroy`).
#[derive(Debug)]
pub struct FfiType(FfiType_);
/// Owning wrapper around a null-terminated descriptor array.
#[derive(Debug)]
pub struct FfiTypeArray(FfiTypeArray_);
// /// Finds the length of a null-terminated C array of pointers.
// unsafe fn arrayz_len<T>(array: *mut *mut T) -> usize {
// let mut count = 0;
// for i in 0 .. {
// let element = array.offset(i);
// if (*element).is_null() {
// break;
// }
// count += 1;
// }
// return count;
// }
/// Creates a null-terminated array of FfiType_. Takes ownership of
/// the elements.
///
/// The array is `malloc`ed with one extra slot for the null terminator.
/// NOTE(review): the `malloc` result is not checked for null — on
/// allocation failure the writes below would be UB; confirm this is
/// acceptable here.
unsafe fn ffi_type_array_create(elements: &[FfiType_]) -> FfiTypeArray_ {
    let size = elements.len();
    let array = libc::malloc((size+1) * mem::size_of::<FfiType_>())
        as FfiTypeArray_;
    // Copy the raw descriptor pointers, then terminate with a null entry so
    // consumers can find the end without a separate length.
    for i in 0 .. size {
        *array.offset(i as isize) = elements[i];
    }
    *array.offset(size as isize) = ptr::null::<bindgen::ffi_type>() as FfiType_;
    println!("ffi_type_array_create({:?}) = {:?}", elements, array);
    array
}
/// Creates a struct ffi_type with the given elements. Takes ownership
/// of the elements.
///
/// `size` and `alignment` start at zero; presumably libffi fills them in
/// when the type is first used — TODO confirm against libffi docs.
/// NOTE(review): both debug printlns below are labelled
/// "ffi_type_array_create" although they run in ffi_type_struct_create.
unsafe fn ffi_type_struct_create(elements: &[FfiType_]) -> FfiType_ {
    println!("ffi_type_array_create({:?})", elements);
    let array = ffi_type_array_create(elements);
    let ffi_type = libc::malloc(mem::size_of::<bindgen::ffi_type>())
        as FfiType_;
    (*ffi_type).size = 0;
    (*ffi_type).alignment = 0;
    (*ffi_type).type_ = bindgen::ffi_type_enum::STRUCT as u16;
    (*ffi_type).elements = array;
    println!("ffi_type_array_create({:?}) = {:?}", elements, ffi_type);
    ffi_type
}
/// Destroys an array of FfiType_ and all of its elements.
///
/// Walks the array until the null terminator, destroying each element,
/// then frees the array storage itself.
unsafe fn ffi_type_array_destroy(ffi_types: FfiTypeArray_) {
    println!("ffi_type_array_destroy({:?})", ffi_types);
    let mut current = ffi_types;
    while !(*current).is_null() {
        ffi_type_destroy(*current);
        current = current.offset(1);
    }
    libc::free(ffi_types as *mut libc::c_void);
}
/// Destroys an FfiType_ if it was dynamically allocated.
///
/// Only STRUCT descriptors are heap-allocated by this module; the scalar
/// descriptors wrap libffi statics and must not be freed.
unsafe fn ffi_type_destroy(ffi_type: FfiType_) {
    println!("ffi_type_destroy({:?})", ffi_type);
    // Null guard: dereferencing a null descriptor below would be UB.
    if ffi_type.is_null() { return }
    let type_ = (*ffi_type).type_;
    if type_ == bindgen::ffi_type_enum::STRUCT as u16 {
        // Struct descriptors own their elements array; free both.
        ffi_type_array_destroy((*ffi_type).elements);
        libc::free(ffi_type as *mut libc::c_void);
    }
}
// /// Marshalls an array of ffi_type*s as a vector. Dropping the vector will
// /// free the array.
// unsafe fn arrayz_to_vec(array: *mut *mut bindgen::ffi_type) -> Vec<FfiType>
// {
// let size = arrayz_len(array);
// mem::transmute(Vec::from_raw_parts(array, size + 1, size + 1))
// }
impl Drop for FfiType {
    // Freeing is a no-op for the static scalar descriptors; only
    // heap-allocated struct descriptors are actually released.
    fn drop(&mut self) {
        unsafe { ffi_type_destroy(self.0) }
    }
}
// impl fmt::Debug for FfiType {
// fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
// if self.0.is_null() {
// return formatter.write_str("null");
// }
// let ffi_type = unsafe { &*self.0 };
// if ffi_type.type_ == bindgen::ffi_type_enum::STRUCT as u16 {
// let vec = unsafe { arrayz_to_vec(ffi_type.elements) };
// try!(vec.fmt(formatter));
// mem::forget(vec);
// } else {
// try!(ffi_type.type_.fmt(formatter));
// }
// Ok(())
// }
// }
// impl Clone for FfiType {
// fn clone(&self) -> Self {
// if self.0.is_null() {
// return FfiType(self.0)
// }
// let ffi_type = unsafe { &*self.0 };
// if ffi_type.type_ == bindgen::ffi_type_enum::STRUCT as u16 {
// let vec = unsafe { arrayz_to_vec(ffi_type.elements) };
// let mut copy = vec.clone();
// mem::forget(vec);
// copy.pop();
// FfiType::structure(copy)
// } else {
// FfiType(self.0)
// }
// }
// }
impl FfiType {
pub fn void() -> Self {
FfiType(unsafe { &mut bindgen::ffi_type_void })
}
pub fn uint8() -> Self {
FfiType(unsafe { &mut bindgen::ffi_type_uint8 })
}
pub fn sint8() -> Self {
FfiType(unsafe { &mut bindgen::ffi_type_sint8 })
}
pub fn uint16() -> Self {
FfiType(unsafe { &mut bindgen::ffi_type_uint16 })
}
pub fn sint16() -> Self {
FfiType(unsafe { &mut bindgen::ffi_type_sint16 })
}
pub fn uint32() -> Self {
FfiType(unsafe { &mut bindgen::ffi_type_uint32 })
}
pub fn sint32() -> Self {
FfiType(unsafe { &mut bindgen::ffi_type_sint32 })
}
pub fn uint64() -> Self {
FfiType(unsafe { &mut bindgen::ffi_type_uint64 })
}
pub fn sint64() -> Self {
FfiType(unsafe { &mut bindgen::ffi_type_sint64 })
}
pub fn float() -> Self {
FfiType(unsafe { &mut bindgen::ffi_type_float })
}
pub fn double() -> Self {
FfiType(unsafe { &mut bindgen::ffi_type_double })
}
pub fn pointer() -> Self {
FfiType(unsafe { &mut bindgen::ffi_type_pointer })
}
pub fn longdouble() -> Self {
FfiType(unsafe { &mut bindgen::ffi_type_longdouble })
}
pub fn complex_float() -> Self {
FfiType(unsafe { &mut bindgen::ffi_type_complex_float })
}
pub fn complex_double() -> Self {
FfiType(unsafe { &mut bindgen::ffi_type_complex_double })
}
pub fn complex_longdouble() -> Self {
FfiType(unsafe { &mut bindgen::ffi_type_complex_longdouble })
}
pub fn structure(fields: Vec<FfiType>) -> Self {
println!("FfiType::structure({:?})", fields);
let fields: Vec<FfiType_> = fields.into_iter().map(|t| t.0).collect();
unsafe {
FfiType(ffi_type_struct_create(fields.as_slice()))
}
}
}
#[cfg(test)]
mod test {
    use super::*;
    #[test]
    fn create_uint64() {
        // Wraps a static scalar descriptor; Drop is a no-op for it.
        let t = FfiType::uint64();
        // assert_eq!("11", format!("{:?}", t));
    }
    #[test]
    fn create_struct() {
        let t = FfiType::structure(vec![FfiType::sint64(),
                                        FfiType::sint64(),
                                        FfiType::uint64()]);
        // assert_eq!("[12, null]", format!("{:?}", t));
        // Deliberately leak instead of dropping the struct descriptor.
        ::std::mem::forget(t);
    }
}
Got rid of double-drop bug.
use super::ffi::bindgen;
use std::{mem, ptr};
use libc;
// Raw pointer to a libffi type descriptor.
type FfiType_ = *mut bindgen::ffi_type;
// Raw pointer to a null-terminated C array of type-descriptor pointers.
type FfiTypeArray_ = *mut FfiType_;
/// Owning wrapper around a libffi type descriptor; `Drop` frees descriptors
/// that were heap-allocated by this module (struct types only — see
/// `ffi_type_destroy`).
#[derive(Debug)]
pub struct FfiType(FfiType_);
/// Owning wrapper around a null-terminated descriptor array.
#[derive(Debug)]
pub struct FfiTypeArray(FfiTypeArray_);
// /// Finds the length of a null-terminated C array of pointers.
// unsafe fn arrayz_len<T>(array: *mut *mut T) -> usize {
// let mut count = 0;
// for i in 0 .. {
// let element = array.offset(i);
// if (*element).is_null() {
// break;
// }
// count += 1;
// }
// return count;
// }
/// Creates a null-terminated array of FfiType_. Takes ownership of
/// the elements.
///
/// The wrappers are `mem::forget`ten after their raw pointers are copied
/// into the C array, so ownership transfers without a double free.
/// NOTE(review): the `malloc` result is not checked for null — confirm
/// allocation failure is acceptable to leave as UB here.
unsafe fn ffi_type_array_create(elements: Vec<FfiType>) -> FfiTypeArray_ {
    let size = elements.len();
    let array = libc::malloc((size+1) * mem::size_of::<FfiType_>())
        as FfiTypeArray_;
    for i in 0 .. size {
        *array.offset(i as isize) = elements[i].0;
    }
    // Null terminator so consumers can find the end without a length.
    *array.offset(size as isize) = ptr::null::<bindgen::ffi_type>() as FfiType_;
    for t in elements {
        mem::forget(t);
    }
    println!("ffi_type_array_create(...) = {:?}", array);
    array
}
/// Creates a struct ffi_type with the given elements. Takes ownership
/// of the elements.
///
/// `size` and `alignment` start at zero; presumably libffi fills them in
/// when the type is first used — TODO confirm against libffi docs.
unsafe fn ffi_type_struct_create(elements: Vec<FfiType>) -> FfiType_ {
    // Debug output previously mislabelled this function as
    // "ffi_type_array_create"; both traces now carry the right name.
    println!("ffi_type_struct_create({:?})", elements);
    let array = ffi_type_array_create(elements);
    let ffi_type = libc::malloc(mem::size_of::<bindgen::ffi_type>())
        as FfiType_;
    (*ffi_type).size = 0;
    (*ffi_type).alignment = 0;
    (*ffi_type).type_ = bindgen::ffi_type_enum::STRUCT as u16;
    (*ffi_type).elements = array;
    println!("ffi_type_struct_create(...) = {:?}", ffi_type);
    ffi_type
}
/// Destroys an array of FfiType_ and all of its elements.
///
/// Walks the array until the null terminator, destroying each element,
/// then frees the array storage itself.
unsafe fn ffi_type_array_destroy(ffi_types: FfiTypeArray_) {
    println!("ffi_type_array_destroy({:?})", ffi_types);
    let mut current = ffi_types;
    while !(*current).is_null() {
        ffi_type_destroy(*current);
        current = current.offset(1);
    }
    libc::free(ffi_types as *mut libc::c_void);
}
/// Destroys an FfiType_ if it was dynamically allocated.
///
/// Only STRUCT descriptors are heap-allocated by this module; the scalar
/// descriptors wrap libffi statics and must not be freed.
unsafe fn ffi_type_destroy(ffi_type: FfiType_) {
    println!("ffi_type_destroy({:?})", ffi_type);
    // Restore the null guard the earlier revision of this function had:
    // dereferencing `(*ffi_type).type_` on a null pointer is UB.
    if ffi_type.is_null() { return }
    if (*ffi_type).type_ == bindgen::ffi_type_enum::STRUCT as u16 {
        ffi_type_array_destroy((*ffi_type).elements);
        libc::free(ffi_type as *mut libc::c_void);
    }
}
// /// Marshalls an array of ffi_type*s as a vector. Dropping the vector will
// /// free the array.
// unsafe fn arrayz_to_vec(array: *mut *mut bindgen::ffi_type) -> Vec<FfiType>
// {
// let size = arrayz_len(array);
// mem::transmute(Vec::from_raw_parts(array, size + 1, size + 1))
// }
impl Drop for FfiType {
    // Freeing is a no-op for the static scalar descriptors; only
    // heap-allocated struct descriptors are actually released.
    fn drop(&mut self) {
        unsafe { ffi_type_destroy(self.0) }
    }
}
// impl fmt::Debug for FfiType {
// fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
// if self.0.is_null() {
// return formatter.write_str("null");
// }
// let ffi_type = unsafe { &*self.0 };
// if ffi_type.type_ == bindgen::ffi_type_enum::STRUCT as u16 {
// let vec = unsafe { arrayz_to_vec(ffi_type.elements) };
// try!(vec.fmt(formatter));
// mem::forget(vec);
// } else {
// try!(ffi_type.type_.fmt(formatter));
// }
// Ok(())
// }
// }
// impl Clone for FfiType {
// fn clone(&self) -> Self {
// if self.0.is_null() {
// return FfiType(self.0)
// }
// let ffi_type = unsafe { &*self.0 };
// if ffi_type.type_ == bindgen::ffi_type_enum::STRUCT as u16 {
// let vec = unsafe { arrayz_to_vec(ffi_type.elements) };
// let mut copy = vec.clone();
// mem::forget(vec);
// copy.pop();
// FfiType::structure(copy)
// } else {
// FfiType(self.0)
// }
// }
// }
impl FfiType {
    // The scalar constructors below wrap libffi's built-in static
    // descriptors; `ffi_type_destroy` deliberately never frees these.
    pub fn void() -> Self {
        FfiType(unsafe { &mut bindgen::ffi_type_void })
    }
    pub fn uint8() -> Self {
        FfiType(unsafe { &mut bindgen::ffi_type_uint8 })
    }
    pub fn sint8() -> Self {
        FfiType(unsafe { &mut bindgen::ffi_type_sint8 })
    }
    pub fn uint16() -> Self {
        FfiType(unsafe { &mut bindgen::ffi_type_uint16 })
    }
    pub fn sint16() -> Self {
        FfiType(unsafe { &mut bindgen::ffi_type_sint16 })
    }
    pub fn uint32() -> Self {
        FfiType(unsafe { &mut bindgen::ffi_type_uint32 })
    }
    pub fn sint32() -> Self {
        FfiType(unsafe { &mut bindgen::ffi_type_sint32 })
    }
    pub fn uint64() -> Self {
        FfiType(unsafe { &mut bindgen::ffi_type_uint64 })
    }
    pub fn sint64() -> Self {
        FfiType(unsafe { &mut bindgen::ffi_type_sint64 })
    }
    pub fn float() -> Self {
        FfiType(unsafe { &mut bindgen::ffi_type_float })
    }
    pub fn double() -> Self {
        FfiType(unsafe { &mut bindgen::ffi_type_double })
    }
    pub fn pointer() -> Self {
        FfiType(unsafe { &mut bindgen::ffi_type_pointer })
    }
    pub fn longdouble() -> Self {
        FfiType(unsafe { &mut bindgen::ffi_type_longdouble })
    }
    pub fn complex_float() -> Self {
        FfiType(unsafe { &mut bindgen::ffi_type_complex_float })
    }
    pub fn complex_double() -> Self {
        FfiType(unsafe { &mut bindgen::ffi_type_complex_double })
    }
    pub fn complex_longdouble() -> Self {
        FfiType(unsafe { &mut bindgen::ffi_type_complex_longdouble })
    }
    /// Build a STRUCT descriptor owning `fields`; the wrappers' descriptors
    /// are transferred into the C elements array by `ffi_type_struct_create`.
    pub fn structure(fields: Vec<FfiType>) -> Self {
        println!("FfiType::structure({:?})", fields);
        unsafe {
            FfiType(ffi_type_struct_create(fields))
        }
    }
}
#[cfg(test)]
mod test {
    use super::*;
    #[test]
    fn create_uint64() {
        // Wraps a static scalar descriptor; Drop is a no-op for it.
        let t = FfiType::uint64();
        // assert_eq!("11", format!("{:?}", t));
    }
    #[test]
    fn create_struct() {
        let t = FfiType::structure(vec![FfiType::sint64(),
                                        FfiType::sint64(),
                                        FfiType::uint64()]);
        // assert_eq!("[12, null]", format!("{:?}", t));
        // Deliberately leak instead of dropping the struct descriptor.
        ::std::mem::forget(t);
    }
}
|
use std::collections::HashMap;
use std::fs;
use std::io::Write;
use std::path;
use failure::ResultExt;
use jsonfeed;
use jsonfeed::Feed;
use liquid;
use rss;
use sitemap::writer::SiteMapWriter;
use crate::cobalt_model;
use crate::cobalt_model::files;
use crate::cobalt_model::permalink;
use crate::cobalt_model::Collection;
use crate::cobalt_model::{Config, Minify, SortOrder};
use crate::document::{Document, RenderContex};
use crate::error::*;
use crate::pagination;
/// Fully-built configuration plus derived state needed to render the site.
struct Context {
    pub source: path::PathBuf,
    pub destination: path::PathBuf,
    pub pages: cobalt_model::Collection,
    pub posts: cobalt_model::Collection,
    pub site: liquid::Object,
    // Layout name -> raw layout template source.
    pub layouts: HashMap<String, String>,
    pub liquid: cobalt_model::Liquid,
    pub markdown: cobalt_model::Markdown,
    pub assets: cobalt_model::Assets,
    // Relative output path of the sitemap, if one should be generated.
    pub sitemap: Option<String>,
    pub minify: Minify,
}
impl Context {
    /// Build every lazy piece of `config` (collections, site vars, liquid
    /// parser, markdown renderer, assets) and load the layouts, producing a
    /// ready-to-render `Context`.
    ///
    /// # Errors
    ///
    /// Propagates any failure from building a sub-component or reading the
    /// layouts directory.
    fn with_config(config: Config) -> Result<Self> {
        let Config {
            source,
            destination,
            pages,
            posts,
            site,
            layouts_dir,
            liquid,
            markdown,
            assets,
            sitemap,
            minify,
        } = config;
        let pages = pages.build()?;
        let posts = posts.build()?;
        let site = site.build()?;
        let liquid = liquid.build()?;
        let markdown = markdown.build();
        let assets = assets.build()?;
        // Layouts are read from disk once, into name -> template source.
        let layouts = find_layouts(&layouts_dir)?;
        let layouts = parse_layouts(&layouts);
        let context = Context {
            source,
            destination,
            pages,
            posts,
            site,
            layouts,
            liquid,
            markdown,
            assets,
            sitemap,
            minify,
        };
        Ok(context)
    }
}
/// The primary build function that transforms a directory into a site
///
/// Pipeline: parse posts (and drafts) -> parse pages -> sort & render posts
/// -> optional RSS / jsonfeed / sitemap -> render pages -> copy assets.
pub fn build(config: Config) -> Result<()> {
    let context = Context::with_config(config)?;
    let post_files = &context.posts.pages;
    let mut posts = parse_pages(post_files, &context.posts, &context.source)?;
    // Drafts live in a separate subtree and are only parsed when configured.
    if let Some(ref drafts) = context.posts.drafts {
        let drafts_root = drafts.subtree();
        parse_drafts(drafts_root, drafts, &mut posts, &context.posts)?;
    }
    let page_files = &context.pages.pages;
    let documents = parse_pages(page_files, &context.pages, &context.source)?;
    sort_pages(&mut posts, &context.posts)?;
    generate_posts(&mut posts, &context)?;
    // check if we should create an RSS file and create it!
    if let Some(ref path) = context.posts.rss {
        create_rss(path, &context.destination, &context.posts, &posts)?;
    }
    // check if we should create an jsonfeed file and create it!
    if let Some(ref path) = context.posts.jsonfeed {
        create_jsonfeed(path, &context.destination, &context.posts, &posts)?;
    }
    if let Some(ref path) = context.sitemap {
        let sitemap_path = &context.destination.join(path);
        create_sitemap(
            &sitemap_path,
            &context.posts,
            &posts,
            &context.pages,
            &documents,
        )?;
    }
    generate_pages(posts, documents, &context)?;
    // copy all remaining files in the source to the destination
    // compile SASS along the way
    context
        .assets
        .populate(&context.destination, &context.minify)?;
    Ok(())
}
/// Build the `("collections", value)` liquid pair exposing all posts under
/// the posts collection's slug (as `collections.<slug>.pages`).
fn generate_collections_var(
    posts_data: &[liquid::model::Value],
    context: &Context,
) -> (kstring::KString, liquid::model::Value) {
    let mut posts_variable = context.posts.attributes.clone();
    posts_variable.insert(
        "pages".into(),
        liquid::model::Value::Array(posts_data.to_vec()),
    );
    let global_collection: liquid::Object = vec![(
        context.posts.slug.clone().into(),
        liquid::model::Value::Object(posts_variable),
    )]
    .into_iter()
    .collect();
    (
        "collections".into(),
        liquid::model::Value::Object(global_collection),
    )
}
/// Render a single document — excerpt, content, then full layout — and
/// write the result below the destination directory.
///
/// `global_collection` is either the `collections` variable or a
/// `paginator` entry, depending on the caller.
fn generate_doc(
    doc: &mut Document,
    context: &Context,
    global_collection: (kstring::KString, liquid::model::Value),
) -> Result<()> {
    // Everything done with `globals` is terrible for performance. liquid#95 allows us to
    // improve this.
    let mut globals: liquid::Object = vec![
        (
            "site".into(),
            liquid::model::Value::Object(context.site.clone()),
        ),
        global_collection,
    ]
    .into_iter()
    .collect();
    globals.insert(
        "page".into(),
        liquid::model::Value::Object(doc.attributes.clone()),
    );
    {
        let render_context = RenderContex {
            parser: &context.liquid,
            markdown: &context.markdown,
            globals: &globals,
            minify: context.minify.clone(),
        };
        doc.render_excerpt(&render_context).with_context(|_| {
            failure::format_err!("Failed to render excerpt for {}", doc.file_path.display())
        })?;
        doc.render_content(&render_context).with_context(|_| {
            failure::format_err!("Failed to render content for {}", doc.file_path.display())
        })?;
    }
    // Refresh `page` with the `excerpt` / `content` attribute
    globals.insert(
        "page".into(),
        liquid::model::Value::Object(doc.attributes.clone()),
    );
    let render_context = RenderContex {
        parser: &context.liquid,
        markdown: &context.markdown,
        globals: &globals,
        minify: context.minify.clone(),
    };
    let doc_html = doc
        .render(&render_context, &context.layouts)
        .with_context(|_| {
            failure::format_err!("Failed to render for {}", doc.file_path.display())
        })?;
    files::write_document_file(doc_html, context.destination.join(&doc.file_path))?;
    Ok(())
}
/// Render all non-post documents, expanding paginated documents into one
/// output file per paginator page.
fn generate_pages(posts: Vec<Document>, documents: Vec<Document>, context: &Context) -> Result<()> {
    // during post rendering additional attributes such as content were
    // added to posts. collect them so that non-post documents can access them
    let posts_data: Vec<liquid::model::Value> = posts
        .into_iter()
        .map(|x| liquid::model::Value::Object(x.attributes))
        .collect();
    trace!("Generating other documents");
    for mut doc in documents {
        trace!("Generating {}", doc.url_path);
        if doc.front.pagination.is_some() {
            let paginators = pagination::generate_paginators(&mut doc, &posts_data)?;
            // page 1 uses frontmatter.permalink instead of paginator.permalink
            let mut paginators = paginators.into_iter();
            let paginator = paginators
                .next()
                .expect("We detected pagination enabled but we have no paginator");
            generate_doc(
                &mut doc,
                context,
                (
                    "paginator".into(),
                    liquid::model::Value::Object(paginator.into()),
                ),
            )?;
            // Pages 2..n are clones of the document written to the
            // paginator's own permalink.
            for paginator in paginators {
                let mut doc_page = doc.clone();
                doc_page.file_path = permalink::format_url_as_file(&paginator.index_permalink);
                generate_doc(
                    &mut doc_page,
                    context,
                    (
                        "paginator".into(),
                        liquid::model::Value::Object(paginator.into()),
                    ),
                )?;
            }
        } else {
            generate_doc(
                &mut doc,
                context,
                generate_collections_var(&posts_data, &context),
            )?;
        };
    }
    Ok(())
}
/// Render every post, wiring `previous`/`next` navigation attributes
/// between consecutive posts.
fn generate_posts(posts: &mut Vec<Document>, context: &Context) -> Result<()> {
    // collect all posts attributes to pass them to other posts for rendering
    let simple_posts_data: Vec<liquid::model::Value> = posts
        .iter()
        .map(|x| liquid::model::Value::Object(x.attributes.clone()))
        .collect();
    trace!("Generating posts");
    // (The stray `&mut` on the iterator chain was redundant; iterate it
    // directly.)
    for (i, post) in posts.iter_mut().enumerate() {
        trace!("Generating {}", post.url_path);
        // posts are in reverse date order, so previous post is the next in the list (+1)
        let previous = simple_posts_data
            .get(i + 1)
            .cloned()
            .unwrap_or(liquid::model::Value::Nil);
        post.attributes.insert("previous".into(), previous);
        // ...and the next post is the preceding entry (-1); `checked_sub`
        // yields None for the first post instead of underflowing.
        let next = i
            .checked_sub(1)
            .and_then(|prev_idx| simple_posts_data.get(prev_idx))
            .cloned()
            .unwrap_or(liquid::model::Value::Nil);
        post.attributes.insert("next".into(), next);
        generate_doc(
            post,
            context,
            generate_collections_var(&simple_posts_data, &context),
        )?;
    }
    Ok(())
}
/// Sort posts newest-first by published date (posts without a date sort as
/// the Unix epoch), then flip to oldest-first when the collection asks.
fn sort_pages(posts: &mut Vec<Document>, collection: &Collection) -> Result<()> {
    // January 1, 1970 0:00:00 UTC, the beginning of time
    let default_date = cobalt_model::DateTime::default();
    // Missing dates (none provided or unreadable) fall back to the default.
    posts.sort_by(|a, b| {
        let date_a = a.front.published_date.unwrap_or(default_date);
        let date_b = b.front.published_date.unwrap_or(default_date);
        date_b.cmp(&date_a)
    });
    match collection.order {
        SortOrder::Asc => posts.reverse(),
        SortOrder::Desc | SortOrder::None => (),
    }
    Ok(())
}
/// Parse draft posts, remapping each file's path so it renders as if it
/// lived in the regular posts directory, and append them to `documents`.
fn parse_drafts(
    drafts_root: &path::Path,
    draft_files: &files::Files,
    documents: &mut Vec<Document>,
    collection: &Collection,
) -> Result<()> {
    let rel_real = collection
        .pages
        .subtree()
        .strip_prefix(collection.pages.root())
        .expect("subtree is under root")
    ;
    for file_path in draft_files.files() {
        // Provide a fake path as if it was not a draft
        let rel_src = file_path
            .strip_prefix(&drafts_root)
            .expect("file was found under the root");
        let new_path = rel_real.join(rel_src);
        // Drafts are force-marked as such regardless of their front matter.
        let default_front = collection.default.clone().set_draft(true);
        let doc = Document::parse(&file_path, &new_path, default_front)
            .with_context(|_| failure::format_err!("Failed to parse {}", rel_src.display()))?;
        documents.push(doc);
    }
    Ok(())
}
/// Collect the set of layout files under `layouts`, including hidden ones.
fn find_layouts(layouts: &path::Path) -> Result<files::Files> {
    let mut builder = files::FilesBuilder::new(layouts)?;
    builder.ignore_hidden(false)?;
    builder.build()
}
/// Load every layout file into a map of relative path -> template source.
///
/// Layouts that fail to load or have non-UTF-8 names are logged as
/// warnings and skipped rather than aborting the build.
fn parse_layouts(files: &files::Files) -> HashMap<String, String> {
    let (entries, errors): (Vec<_>, Vec<_>) = files
        .files()
        .map(|file_path| {
            let rel_src = file_path
                .strip_prefix(files.root())
                .expect("file was found under the root");
            let layout_data = files::read_file(&file_path).with_context(|_| {
                failure::format_err!("Failed to load layout {}", rel_src.display())
            })?;
            let path = rel_src
                .to_str()
                .ok_or_else(|| {
                    failure::format_err!("File name not valid liquid path: {}", rel_src.display())
                })?
                .to_owned();
            Ok((path, layout_data))
        })
        .partition(Result::is_ok);
    for error in errors {
        warn!("{}", error.expect_err("partition to filter out oks"));
    }
    entries
        .into_iter()
        .map(|entry| entry.expect("partition to filter out errors"))
        .collect()
}
/// Parse every file of a collection into `Document`s, skipping drafts
/// unless the collection includes them.
fn parse_pages(
    page_files: &files::Files,
    collection: &Collection,
    source: &path::Path,
) -> Result<Vec<Document>> {
    let mut documents = vec![];
    for file_path in page_files.files() {
        let rel_src = file_path
            .strip_prefix(source)
            .expect("file was found under the root");
        let default_front = collection.default.clone();
        let doc = Document::parse(&file_path, rel_src, default_front)
            .with_context(|_| failure::format_err!("Failed to parse {}", rel_src.display()))?;
        if !doc.front.is_draft || collection.include_drafts {
            documents.push(doc);
        }
    }
    Ok(documents)
}
// creates a new RSS file with the contents of the site blog
fn create_rss(
    path: &str,
    dest: &path::Path,
    collection: &Collection,
    documents: &[Document],
) -> Result<()> {
    let rss_path = dest.join(path);
    debug!("Creating RSS file at {}", rss_path.display());
    let title = &collection.title;
    let description = collection.description.as_deref().unwrap_or("");
    // RSS items require an absolute link to the site.
    let link = collection
        .base_url
        .as_ref()
        .ok_or_else(|| failure::err_msg("`base_url` is required for RSS support"))?;
    let items: Result<Vec<rss::Item>> = documents.iter().map(|doc| doc.to_rss(link)).collect();
    let items = items?;
    let channel = rss::ChannelBuilder::default()
        .title(title.to_owned())
        .link(link.to_owned())
        .description(description.to_owned())
        .items(items)
        .build()
        .map_err(failure::err_msg)?;
    let rss_string = channel.to_string();
    trace!("RSS data: {}", rss_string);
    // create target directories if any exist
    if let Some(parent) = rss_path.parent() {
        fs::create_dir_all(parent)
            .with_context(|_| failure::format_err!("Could not create {}", parent.display()))?;
    }
    let mut rss_file = fs::File::create(&rss_path)?;
    // Do NOT prepend an XML declaration here: the serialized channel
    // already carries one, and the duplicate declaration produced invalid
    // XML that feed readers rejected.
    rss_file.write_all(&rss_string.into_bytes())?;
    rss_file.write_all(b"\n")?;
    Ok(())
}
// creates a new jsonfeed file with the contents of the site blog
fn create_jsonfeed(
    path: &str,
    dest: &path::Path,
    collection: &Collection,
    documents: &[Document],
) -> Result<()> {
    let jsonfeed_path = dest.join(path);
    debug!("Creating jsonfeed file at {}", jsonfeed_path.display());
    let title = &collection.title;
    let description = collection.description.as_deref().unwrap_or("");
    let link = collection
        .base_url
        .as_ref()
        .ok_or_else(|| failure::err_msg("`base_url` is required for jsonfeed support"))?;
    let jsonitems = documents.iter().map(|doc| doc.to_jsonfeed(link)).collect();
    let feed = Feed {
        title: title.to_string(),
        items: jsonitems,
        home_page_url: Some(link.to_string()),
        description: Some(description.to_string()),
        ..Default::default()
    };
    // Propagate serialization failures instead of panicking: the previous
    // `.unwrap()` aborted the whole build. Mirrors create_rss's error style.
    let jsonfeed_string = jsonfeed::to_string(&feed).map_err(failure::err_msg)?;
    files::write_document_file(jsonfeed_string, jsonfeed_path)?;
    Ok(())
}
/// Write a sitemap covering both posts and pages to `sitemap_path`.
///
/// Requires `base_url` on both collections, since sitemap entries must be
/// absolute URLs.
fn create_sitemap(
    sitemap_path: &path::Path,
    collection: &Collection,
    documents: &[Document],
    collection_pages: &Collection,
    documents_pages: &[Document],
) -> Result<()> {
    debug!("Creating sitemap file at {}", sitemap_path.display());
    let mut buff = Vec::new();
    let writer = SiteMapWriter::new(&mut buff);
    let link = collection
        .base_url
        .as_ref()
        .ok_or_else(|| failure::err_msg("`base_url` is required for sitemap support"))?;
    let mut urls = writer.start_urlset()?;
    for doc in documents {
        doc.to_sitemap(link, &mut urls)?;
    }
    // Pages may use a different base_url than posts; resolve it separately.
    let link = collection_pages
        .base_url
        .as_ref()
        .ok_or_else(|| failure::err_msg("`base_url` is required for sitemap support"))?;
    for doc in documents_pages {
        doc.to_sitemap(link, &mut urls)?;
    }
    urls.end()?;
    files::write_document_file(String::from_utf8(buff)?, sitemap_path)?;
    Ok(())
}
fix(rss) Remove duplicate XML declaration in RSS file. (#856)
The extra XML declaration prevents the XML from being parsed.
use std::collections::HashMap;
use std::fs;
use std::io::Write;
use std::path;
use failure::ResultExt;
use jsonfeed;
use jsonfeed::Feed;
use liquid;
use rss;
use sitemap::writer::SiteMapWriter;
use crate::cobalt_model;
use crate::cobalt_model::files;
use crate::cobalt_model::permalink;
use crate::cobalt_model::Collection;
use crate::cobalt_model::{Config, Minify, SortOrder};
use crate::document::{Document, RenderContex};
use crate::error::*;
use crate::pagination;
/// Fully-built configuration plus derived state needed to render the site.
struct Context {
    pub source: path::PathBuf,
    pub destination: path::PathBuf,
    pub pages: cobalt_model::Collection,
    pub posts: cobalt_model::Collection,
    pub site: liquid::Object,
    // Layout name -> raw layout template source.
    pub layouts: HashMap<String, String>,
    pub liquid: cobalt_model::Liquid,
    pub markdown: cobalt_model::Markdown,
    pub assets: cobalt_model::Assets,
    // Relative output path of the sitemap, if one should be generated.
    pub sitemap: Option<String>,
    pub minify: Minify,
}
impl Context {
    /// Build every lazy piece of `config` (collections, site vars, liquid
    /// parser, markdown renderer, assets) and load the layouts, producing a
    /// ready-to-render `Context`.
    ///
    /// # Errors
    ///
    /// Propagates any failure from building a sub-component or reading the
    /// layouts directory.
    fn with_config(config: Config) -> Result<Self> {
        let Config {
            source,
            destination,
            pages,
            posts,
            site,
            layouts_dir,
            liquid,
            markdown,
            assets,
            sitemap,
            minify,
        } = config;
        let pages = pages.build()?;
        let posts = posts.build()?;
        let site = site.build()?;
        let liquid = liquid.build()?;
        let markdown = markdown.build();
        let assets = assets.build()?;
        // Layouts are read from disk once, into name -> template source.
        let layouts = find_layouts(&layouts_dir)?;
        let layouts = parse_layouts(&layouts);
        let context = Context {
            source,
            destination,
            pages,
            posts,
            site,
            layouts,
            liquid,
            markdown,
            assets,
            sitemap,
            minify,
        };
        Ok(context)
    }
}
/// The primary build function that transforms a directory into a site
///
/// Pipeline: parse posts (and drafts) -> parse pages -> sort & render posts
/// -> optional RSS / jsonfeed / sitemap -> render pages -> copy assets.
pub fn build(config: Config) -> Result<()> {
    let context = Context::with_config(config)?;
    let post_files = &context.posts.pages;
    let mut posts = parse_pages(post_files, &context.posts, &context.source)?;
    // Drafts live in a separate subtree and are only parsed when configured.
    if let Some(ref drafts) = context.posts.drafts {
        let drafts_root = drafts.subtree();
        parse_drafts(drafts_root, drafts, &mut posts, &context.posts)?;
    }
    let page_files = &context.pages.pages;
    let documents = parse_pages(page_files, &context.pages, &context.source)?;
    sort_pages(&mut posts, &context.posts)?;
    generate_posts(&mut posts, &context)?;
    // check if we should create an RSS file and create it!
    if let Some(ref path) = context.posts.rss {
        create_rss(path, &context.destination, &context.posts, &posts)?;
    }
    // check if we should create an jsonfeed file and create it!
    if let Some(ref path) = context.posts.jsonfeed {
        create_jsonfeed(path, &context.destination, &context.posts, &posts)?;
    }
    if let Some(ref path) = context.sitemap {
        let sitemap_path = &context.destination.join(path);
        create_sitemap(
            &sitemap_path,
            &context.posts,
            &posts,
            &context.pages,
            &documents,
        )?;
    }
    generate_pages(posts, documents, &context)?;
    // copy all remaining files in the source to the destination
    // compile SASS along the way
    context
        .assets
        .populate(&context.destination, &context.minify)?;
    Ok(())
}
/// Build the `collections` template variable: the posts collection's
/// attributes plus a `pages` array holding every post's attributes.
fn generate_collections_var(
    posts_data: &[liquid::model::Value],
    context: &Context,
) -> (kstring::KString, liquid::model::Value) {
    let mut posts_variable = context.posts.attributes.clone();
    posts_variable.insert(
        "pages".into(),
        liquid::model::Value::Array(posts_data.to_vec()),
    );
    // Wrap the collection under its slug inside a fresh object.
    let mut global_collection = liquid::Object::new();
    global_collection.insert(
        context.posts.slug.clone().into(),
        liquid::model::Value::Object(posts_variable),
    );
    (
        "collections".into(),
        liquid::model::Value::Object(global_collection),
    )
}
/// Render a single document in place and write the result to the
/// destination directory.
///
/// `global_collection` is the per-render extra template variable: either the
/// `collections` variable or a `paginator`.
fn generate_doc(
    doc: &mut Document,
    context: &Context,
    global_collection: (kstring::KString, liquid::model::Value),
) -> Result<()> {
    // Everything done with `globals` is terrible for performance. liquid#95 allows us to
    // improve this.
    let mut globals: liquid::Object = vec![
        (
            "site".into(),
            liquid::model::Value::Object(context.site.clone()),
        ),
        global_collection,
    ]
    .into_iter()
    .collect();
    globals.insert(
        "page".into(),
        liquid::model::Value::Object(doc.attributes.clone()),
    );
    // Scope the first render context so `globals` can be mutated again below.
    {
        let render_context = RenderContex {
            parser: &context.liquid,
            markdown: &context.markdown,
            globals: &globals,
            minify: context.minify.clone(),
        };
        doc.render_excerpt(&render_context).with_context(|_| {
            failure::format_err!("Failed to render excerpt for {}", doc.file_path.display())
        })?;
        doc.render_content(&render_context).with_context(|_| {
            failure::format_err!("Failed to render content for {}", doc.file_path.display())
        })?;
    }
    // Refresh `page` with the `excerpt` / `content` attribute
    globals.insert(
        "page".into(),
        liquid::model::Value::Object(doc.attributes.clone()),
    );
    let render_context = RenderContex {
        parser: &context.liquid,
        markdown: &context.markdown,
        globals: &globals,
        minify: context.minify.clone(),
    };
    let doc_html = doc
        .render(&render_context, &context.layouts)
        .with_context(|_| {
            failure::format_err!("Failed to render for {}", doc.file_path.display())
        })?;
    files::write_document_file(doc_html, context.destination.join(&doc.file_path))?;
    Ok(())
}
/// Render all non-post documents. Paginated documents are rendered once per
/// paginator page; everything else gets the `collections` variable.
fn generate_pages(posts: Vec<Document>, documents: Vec<Document>, context: &Context) -> Result<()> {
    // during post rendering additional attributes such as content were
    // added to posts. collect them so that non-post documents can access them
    let posts_data: Vec<liquid::model::Value> = posts
        .into_iter()
        .map(|x| liquid::model::Value::Object(x.attributes))
        .collect();
    trace!("Generating other documents");
    for mut doc in documents {
        trace!("Generating {}", doc.url_path);
        if doc.front.pagination.is_some() {
            let paginators = pagination::generate_paginators(&mut doc, &posts_data)?;
            // page 1 uses frontmatter.permalink instead of paginator.permalink
            let mut paginators = paginators.into_iter();
            let paginator = paginators
                .next()
                .expect("We detected pagination enabled but we have no paginator");
            generate_doc(
                &mut doc,
                context,
                (
                    "paginator".into(),
                    liquid::model::Value::Object(paginator.into()),
                ),
            )?;
            // Each later page renders a clone of the document, written at the
            // paginator's own permalink.
            for paginator in paginators {
                let mut doc_page = doc.clone();
                doc_page.file_path = permalink::format_url_as_file(&paginator.index_permalink);
                generate_doc(
                    &mut doc_page,
                    context,
                    (
                        "paginator".into(),
                        liquid::model::Value::Object(paginator.into()),
                    ),
                )?;
            }
        } else {
            generate_doc(
                &mut doc,
                context,
                generate_collections_var(&posts_data, &context),
            )?;
        };
    }
    Ok(())
}
/// Render every post, wiring up `previous`/`next` attributes between
/// neighbouring posts before each render.
fn generate_posts(posts: &mut Vec<Document>, context: &Context) -> Result<()> {
    // collect all posts attributes to pass them to other posts for rendering
    let simple_posts_data: Vec<liquid::model::Value> = posts
        .iter()
        .map(|x| liquid::model::Value::Object(x.attributes.clone()))
        .collect();
    trace!("Generating posts");
    // (The original iterated over `&mut posts.iter_mut().enumerate()`; the
    // extra `&mut` on an owned iterator was redundant.)
    for (i, post) in posts.iter_mut().enumerate() {
        trace!("Generating {}", post.url_path);
        // posts are in reverse date order, so previous post is the next in the list (+1)
        let previous = simple_posts_data
            .get(i + 1)
            .cloned()
            .unwrap_or(liquid::model::Value::Nil);
        post.attributes.insert("previous".into(), previous);
        // ... and the chronologically-next post is at index i - 1 (absent for
        // the newest post). `checked_sub` avoids the manual `i >= 1` guard.
        let next = i
            .checked_sub(1)
            .and_then(|prev| simple_posts_data.get(prev))
            .cloned()
            .unwrap_or(liquid::model::Value::Nil);
        post.attributes.insert("next".into(), next);
        generate_doc(
            post,
            context,
            generate_collections_var(&simple_posts_data, context),
        )?;
    }
    Ok(())
}
/// Sort posts by published date, newest first by default; `SortOrder::Asc`
/// flips the result. Undated posts fall back to the epoch default date.
fn sort_pages(posts: &mut Vec<Document>, collection: &Collection) -> Result<()> {
    // January 1, 1970 0:00:00 UTC, the beginning of time
    let default_date = cobalt_model::DateTime::default();
    let sort_key = |doc: &Document| doc.front.published_date.unwrap_or(default_date);
    // Newest first: compare b against a. `sort_by` is stable, as before.
    posts.sort_by(|a, b| sort_key(b).cmp(&sort_key(a)));
    if let SortOrder::Asc = collection.order {
        posts.reverse();
    }
    Ok(())
}
/// Parse draft posts and append them to `documents`, remapping each draft's
/// path so it appears to live in the regular posts directory.
fn parse_drafts(
    drafts_root: &path::Path,
    draft_files: &files::Files,
    documents: &mut Vec<Document>,
    collection: &Collection,
) -> Result<()> {
    // Posts subtree relative to the collection root.
    let rel_real = collection
        .pages
        .subtree()
        .strip_prefix(collection.pages.root())
        .expect("subtree is under root");
    for file_path in draft_files.files() {
        // Provide a fake path as if it was not a draft
        let rel_src = file_path
            .strip_prefix(&drafts_root)
            .expect("file was found under the root");
        let new_path = rel_real.join(rel_src);
        // Drafts are parsed with the draft flag forced on.
        let default_front = collection.default.clone().set_draft(true);
        let doc = Document::parse(&file_path, &new_path, default_front)
            .with_context(|_| failure::format_err!("Failed to parse {}", rel_src.display()))?;
        documents.push(doc);
    }
    Ok(())
}
/// Enumerate the files under the layouts directory, including hidden ones.
fn find_layouts(layouts: &path::Path) -> Result<files::Files> {
    let mut builder = files::FilesBuilder::new(layouts)?;
    builder.ignore_hidden(false)?;
    builder.build()
}
/// Read every layout file into memory, keyed by its path relative to the
/// layouts root. Unreadable or non-UTF-8-named files are logged and skipped.
fn parse_layouts(files: &files::Files) -> HashMap<String, String> {
    let (entries, errors): (Vec<_>, Vec<_>) = files
        .files()
        .map(|file_path| {
            let rel_src = file_path
                .strip_prefix(files.root())
                .expect("file was found under the root")
            ;
            let layout_data = files::read_file(&file_path).with_context(|_| {
                failure::format_err!("Failed to load layout {}", rel_src.display())
            })?;
            let path = rel_src
                .to_str()
                .ok_or_else(|| {
                    failure::format_err!("File name not valid liquid path: {}", rel_src.display())
                })?
                .to_owned();
            Ok((path, layout_data))
        })
        .partition(Result::is_ok);
    // Failures are only warnings: one bad layout should not abort the build.
    for error in errors {
        warn!("{}", error.expect_err("partition to filter out oks"));
    }
    entries
        .into_iter()
        .map(|entry| entry.expect("partition to filter out errors"))
        .collect()
}
/// Parse every file in the collection into a `Document`, relative to
/// `source`. Drafts are dropped unless the collection includes them.
fn parse_pages(
    page_files: &files::Files,
    collection: &Collection,
    source: &path::Path,
) -> Result<Vec<Document>> {
    let mut documents = Vec::new();
    for file_path in page_files.files() {
        let rel_src = file_path
            .strip_prefix(source)
            .expect("file was found under the root");
        let doc = Document::parse(&file_path, rel_src, collection.default.clone())
            .with_context(|_| failure::format_err!("Failed to parse {}", rel_src.display()))?;
        let keep = collection.include_drafts || !doc.front.is_draft;
        if keep {
            documents.push(doc);
        }
    }
    Ok(documents)
}
// creates a new RSS file with the contents of the site blog
fn create_rss(
    path: &str,
    dest: &path::Path,
    collection: &Collection,
    documents: &[Document],
) -> Result<()> {
    let rss_path = dest.join(path);
    debug!("Creating RSS file at {}", rss_path.display());
    let title = &collection.title;
    let description = collection.description.as_deref().unwrap_or("");
    // RSS items need absolute links, so a base URL is mandatory.
    let link = collection
        .base_url
        .as_ref()
        .ok_or_else(|| failure::err_msg("`base_url` is required for RSS support"))?;
    // Collecting into Result short-circuits on the first failed conversion.
    let items: Result<Vec<rss::Item>> = documents.iter().map(|doc| doc.to_rss(link)).collect();
    let items = items?;
    let channel = rss::ChannelBuilder::default()
        .title(title.to_owned())
        .link(link.to_owned())
        .description(description.to_owned())
        .items(items)
        .build()
        .map_err(failure::err_msg)?;
    let rss_string = channel.to_string();
    trace!("RSS data: {}", rss_string);
    // create target directories if any exist
    if let Some(parent) = rss_path.parent() {
        fs::create_dir_all(parent)
            .with_context(|_| failure::format_err!("Could not create {}", parent.display()))?;
    }
    let mut rss_file = fs::File::create(&rss_path)?;
    rss_file.write_all(&rss_string.into_bytes())?;
    rss_file.write_all(b"\n")?;
    Ok(())
}
// creates a new jsonfeed file with the contents of the site blog
fn create_jsonfeed(
    path: &str,
    dest: &path::Path,
    collection: &Collection,
    documents: &[Document],
) -> Result<()> {
    let jsonfeed_path = dest.join(path);
    debug!("Creating jsonfeed file at {}", jsonfeed_path.display());
    let title = &collection.title;
    let description = collection.description.as_deref().unwrap_or("");
    // Like RSS, the feed needs absolute links, so a base URL is mandatory.
    let link = collection
        .base_url
        .as_ref()
        .ok_or_else(|| failure::err_msg("`base_url` is required for jsonfeed support"))?;
    let jsonitems = documents.iter().map(|doc| doc.to_jsonfeed(link)).collect();
    let feed = Feed {
        title: title.to_string(),
        items: jsonitems,
        home_page_url: Some(link.to_string()),
        description: Some(description.to_string()),
        ..Default::default()
    };
    // NOTE(review): serialization failure panics here; consider propagating
    // the error like the RSS path does.
    let jsonfeed_string = jsonfeed::to_string(&feed).unwrap();
    files::write_document_file(jsonfeed_string, jsonfeed_path)?;
    Ok(())
}
/// Write a sitemap covering both the posts and pages collections.
fn create_sitemap(
    sitemap_path: &path::Path,
    collection: &Collection,
    documents: &[Document],
    collection_pages: &Collection,
    documents_pages: &[Document],
) -> Result<()> {
    debug!("Creating sitemap file at {}", sitemap_path.display());
    // Build the XML in memory, then write it out as a single document.
    let mut buff = Vec::new();
    let writer = SiteMapWriter::new(&mut buff);
    let link = collection
        .base_url
        .as_ref()
        .ok_or_else(|| failure::err_msg("`base_url` is required for sitemap support"))?;
    let mut urls = writer.start_urlset()?;
    for doc in documents {
        doc.to_sitemap(link, &mut urls)?;
    }
    let link = collection_pages
        .base_url
        .as_ref()
        .ok_or_else(|| failure::err_msg("`base_url` is required for sitemap support"))?;
    for doc in documents_pages {
        doc.to_sitemap(link, &mut urls)?;
    }
    urls.end()?;
    files::write_document_file(String::from_utf8(buff)?, sitemap_path)?;
    Ok(())
}
|
use super::request::*;
use std::cmp::Ordering;
/// HSB colour representation - hue, saturation, brightness (aka value).
/// This is the HSV-style model LIFX uses, not CSS's HSL.
#[derive(Debug)]
pub struct HSB {
    pub hue: u16,
    pub saturation: u8,
    pub brightness: u8,
}
/// RGB colour representation - red, green, blue, one byte per channel.
pub struct RGB {
    pub red: u8,
    pub green: u8,
    pub blue: u8,
}
/// HSB plus a `kelvin` component - presumably white colour temperature;
/// confirm against the LIFX protocol docs.
#[derive(Debug)]
pub struct HSBK {
    pub hue: u16,
    pub saturation: u8,
    pub brightness: u8,
    pub kelvin: u16,
}
impl HSB {
    /// Construct an HSB value from its three components.
    pub fn new(h: u16, s: u8, b: u8) -> HSB {
        HSB {
            hue: h,
            saturation: s,
            brightness: b,
        }
    }
}
/// Maximum value of the 16-bit words LIFX uses for colour components.
const WORDSIZE: f32 = 65535.0;
// LIFX uses HSB aka HSV, not HSL.
/// Scale a hue in degrees (0-360) into a big-endian 16-bit word
/// (big-endian per the `[0x55, 0x55]` / `[0x7F, 0xFF]` test expectations).
pub fn hue_degrees_to_word(degrees: u16) -> [u8; 2] {
    let f = degrees as f32 / 360.0 * WORDSIZE;
    let b = RequestBin::u16_to_u8_array(f as u16);
    [b[0], b[1]]
}
/// Inverse of `hue_degrees_to_word`: 16-bit word back to degrees.
pub fn hue_word_to_degrees(word: u16) -> u16 {
    let f: f32 = word as f32 / WORDSIZE;
    let d: u16 = (f * 360.0) as u16;
    d
}
/// Scale a percentage (0-100) into a big-endian 16-bit word.
pub fn saturation_percent_to_word(percent: u8) -> [u8; 2] {
    let f = percent as f32 / 100.0 * WORDSIZE;
    let b = RequestBin::u16_to_u8_array(f as u16);
    [b[0], b[1]]
}
/// Inverse of `saturation_percent_to_word`: word back to a percentage.
pub fn saturation_word_to_percent(word: u16) -> u8 {
    let f = word as f32 / WORDSIZE;
    let p = (f * 100.0) as u8;
    p
}
/// Brightness uses the same 0-100 -> word scaling as saturation.
pub fn brightness_percent_to_word(percent: u8) -> [u8; 2] {
    saturation_percent_to_word(percent)
}
/// Inverse of `brightness_percent_to_word`.
pub fn brightness_word_to_percent(word: u16) -> u8 {
    let f = word as f32 / WORDSIZE;
    let p = (f * 100.0) as u8;
    p
}
/// Convert an RGB colour to HSB/HSV.
///
/// Fix: the red-is-max branch can produce a negative hue angle (e.g. for
/// magenta-ish colours where blue > green); the old code let the f32 -> u16
/// cast saturate that to 0. Wrap negative angles into [0, 360) instead.
/// Greyscale inputs (max == min) keep hue 0, as before.
pub fn rgb_to_hsv(rgb: RGB) -> HSB {
    // Normalise each channel into [0, 1].
    let r1 = rgb.red as f32 / 255.0;
    let g1 = rgb.green as f32 / 255.0;
    let b1 = rgb.blue as f32 / 255.0;
    let cmax = r1.max(g1).max(b1);
    let cmin = r1.min(g1).min(b1);
    let d = cmax - cmin;
    // Hue, in degrees, from whichever channel is dominant.
    let mut h = if d == 0.0 {
        // Greyscale: hue is undefined; use 0 (matches previous behaviour).
        0.0
    } else if r1 == cmax {
        (((g1 - b1) / d) % 6.0) * 60.0
    } else if g1 == cmax {
        (((b1 - r1) / d) + 2.0) * 60.0
    } else {
        (((r1 - g1) / d) + 4.0) * 60.0
    };
    // Wrap a negative angle from the red branch into [0, 360).
    if h < 0.0 {
        h += 360.0;
    }
    // Saturation is the channel spread relative to the brightest channel.
    let s = match cmax {
        0.0 => 0.0,
        _ => d / cmax,
    };
    // Value / brightness.
    let v = cmax;
    HSB {
        hue: h as u16,
        saturation: (s * 100.0) as u8,
        brightness: (v * 100.0) as u8,
    }
}
#[cfg(test)]
mod tests {
    use colour::*;
    #[test]
    fn test_hue_degrees_to_word() {
        // 120 degrees is a third of the circle: 65535 / 3 = 0x5555.
        assert_eq!([0x55, 0x55], hue_degrees_to_word(120));
    }
    #[test]
    fn test_hue_word_to_degrees() {
        assert_eq!(360, hue_word_to_degrees(65535));
        assert_eq!(0, hue_word_to_degrees(0));
        assert_eq!(180, hue_word_to_degrees(32768));
    }
    #[test]
    fn test_saturation_percent_to_word() {
        // 50% maps to the midpoint word 0x7FFF.
        assert_eq!([0x7F, 0xFF], saturation_percent_to_word(50));
    }
    #[test]
    fn test_rgb_to_hsv() {
        // Table-driven check against known RGB -> HSB conversions.
        struct Test {
            rgb: RGB,
            hsb: HSB,
        };
        let tests = vec![
            Test {
                rgb: RGB { // olive
                    red: 128,
                    green: 128,
                    blue: 0,
                },
                hsb: HSB {
                    hue: 60,
                    saturation: 100,
                    brightness: 50,
                },
            },
            Test {
                rgb: RGB { // chartreuse
                    red: 127,
                    green: 255,
                    blue: 0,
                },
                hsb: HSB {
                    hue: 90,
                    saturation: 100,
                    brightness: 100,
                },
            },
        ];
        for t in tests {
            let res = rgb_to_hsv(t.rgb);
            assert_eq!(res.hue, t.hsb.hue);
            assert_eq!(res.saturation, t.hsb.saturation);
            assert_eq!(res.brightness, t.hsb.brightness);
        }
    }
}
// Named colour presets.
// NOTE(review): several brightness values (e.g. BLUE, CHARTREUSE, GREEN at
// 50) look like HSL lightness rather than HSB brightness (chartreuse in the
// test above is brightness 100) - confirm the intended colours.
pub const BEIGE: HSB = HSB {
    hue: 60,
    saturation: 56,
    brightness: 91,
};
pub const BLUE: HSB = HSB {
    hue: 240,
    saturation: 100,
    brightness: 50,
};
pub const CHARTREUSE: HSB = HSB {
    hue: 90,
    saturation: 100,
    brightness: 50,
};
pub const CORAL: HSB = HSB {
    hue: 16,
    saturation: 100,
    brightness: 66,
};
pub const CORNFLOWER: HSB = HSB {
    hue: 219,
    saturation: 79,
    brightness: 66,
};
pub const CRIMSON: HSB = HSB {
    hue: 348,
    saturation: 83,
    brightness: 47,
};
pub const DEEP_SKY_BLUE: HSB = HSB {
    hue: 195,
    saturation: 100,
    brightness: 50,
};
pub const GREEN: HSB = HSB {
    hue: 120,
    saturation: 100,
    brightness: 50,
};
pub const RED: HSB = HSB {
    hue: 0,
    saturation: 100,
    brightness: 50,
};
pub const SLATE_GRAY: HSB = HSB {
    hue: 210,
    saturation: 13,
    brightness: 50,
};
Add doc comments describing the colour types and the conversion helpers.
use super::request::*;
use std::cmp::Ordering;
/// HSB colour representation - hue, saturation, brightness (aka value).
/// Aka HSV (LIFX terminology) - hue, saturation, value.
/// This is not the same as HSL as used in CSS.
#[derive(Debug)]
pub struct HSB {
    pub hue: u16,
    pub saturation: u8,
    pub brightness: u8,
}
/// RGB colour representation - red, green, blue, one byte per channel.
pub struct RGB {
    pub red: u8,
    pub green: u8,
    pub blue: u8,
}
/// HSBK colour representation - hue, saturation, brightness, kelvin.
/// Kelvin seems to be relevant only to whites - temperature of white.
#[derive(Debug)]
pub struct HSBK {
    pub hue: u16,
    pub saturation: u8,
    pub brightness: u8,
    pub kelvin: u16,
}
impl HSB {
    /// Construct an HSB value from its three components.
    pub fn new(h: u16, s: u8, b: u8) -> HSB {
        HSB {
            hue: h,
            saturation: s,
            brightness: b,
        }
    }
}
/// Converting HSBK -> HSB simply drops the kelvin component.
impl From<HSBK> for HSB {
    fn from(c: HSBK) -> HSB {
        HSB::new(
            c.hue,
            c.saturation,
            c.brightness,
        )
    }
}
/// Maximum value of the 16-bit words LIFX uses for colour components.
const WORDSIZE: f32 = 65535.0;
// LIFX uses HSB aka HSV, not HSL.
/// Scale a hue in degrees (0-360) into a big-endian 16-bit word
/// (big-endian per the `[0x55, 0x55]` / `[0x7F, 0xFF]` test expectations).
pub fn hue_degrees_to_word(degrees: u16) -> [u8; 2] {
    let f = degrees as f32 / 360.0 * WORDSIZE;
    let b = RequestBin::u16_to_u8_array(f as u16);
    [b[0], b[1]]
}
/// Inverse of `hue_degrees_to_word`: 16-bit word back to degrees.
pub fn hue_word_to_degrees(word: u16) -> u16 {
    let f: f32 = word as f32 / WORDSIZE;
    let d: u16 = (f * 360.0) as u16;
    d
}
/// Scale a percentage (0-100) into a big-endian 16-bit word.
pub fn saturation_percent_to_word(percent: u8) -> [u8; 2] {
    let f = percent as f32 / 100.0 * WORDSIZE;
    let b = RequestBin::u16_to_u8_array(f as u16);
    [b[0], b[1]]
}
/// Inverse of `saturation_percent_to_word`: word back to a percentage.
pub fn saturation_word_to_percent(word: u16) -> u8 {
    let f = word as f32 / WORDSIZE;
    let p = (f * 100.0) as u8;
    p
}
/// Brightness uses the same 0-100 -> word scaling as saturation.
pub fn brightness_percent_to_word(percent: u8) -> [u8; 2] {
    saturation_percent_to_word(percent)
}
/// Inverse of `brightness_percent_to_word`.
pub fn brightness_word_to_percent(word: u16) -> u8 {
    let f = word as f32 / WORDSIZE;
    let p = (f * 100.0) as u8;
    p
}
/// Convert an RGB colour to HSB/HSV.
///
/// Fix: the red-is-max branch can produce a negative hue angle (e.g. for
/// magenta-ish colours where blue > green); the old code let the f32 -> u16
/// cast saturate that to 0. Wrap negative angles into [0, 360) instead.
/// Greyscale inputs (max == min) keep hue 0, as before.
pub fn rgb_to_hsv(rgb: RGB) -> HSB {
    // Normalise each channel into [0, 1].
    let r1 = rgb.red as f32 / 255.0;
    let g1 = rgb.green as f32 / 255.0;
    let b1 = rgb.blue as f32 / 255.0;
    let cmax = r1.max(g1).max(b1);
    let cmin = r1.min(g1).min(b1);
    let d = cmax - cmin;
    // Hue, in degrees, from whichever channel is dominant.
    let mut h = if d == 0.0 {
        // Greyscale: hue is undefined; use 0 (matches previous behaviour).
        0.0
    } else if r1 == cmax {
        (((g1 - b1) / d) % 6.0) * 60.0
    } else if g1 == cmax {
        (((b1 - r1) / d) + 2.0) * 60.0
    } else {
        (((r1 - g1) / d) + 4.0) * 60.0
    };
    // Wrap a negative angle from the red branch into [0, 360).
    if h < 0.0 {
        h += 360.0;
    }
    // Saturation is the channel spread relative to the brightest channel.
    let s = match cmax {
        0.0 => 0.0,
        _ => d / cmax,
    };
    // Value / brightness.
    let v = cmax;
    HSB {
        hue: h as u16,
        saturation: (s * 100.0) as u8,
        brightness: (v * 100.0) as u8,
    }
}
#[cfg(test)]
mod tests {
    use colour::*;
    #[test]
    fn test_hue_degrees_to_word() {
        // 120 degrees is a third of the circle: 65535 / 3 = 0x5555.
        assert_eq!([0x55, 0x55], hue_degrees_to_word(120));
    }
    #[test]
    fn test_hue_word_to_degrees() {
        assert_eq!(360, hue_word_to_degrees(65535));
        assert_eq!(0, hue_word_to_degrees(0));
        assert_eq!(180, hue_word_to_degrees(32768));
    }
    #[test]
    fn test_saturation_percent_to_word() {
        // 50% maps to the midpoint word 0x7FFF.
        assert_eq!([0x7F, 0xFF], saturation_percent_to_word(50));
    }
    #[test]
    fn test_rgb_to_hsv() {
        // Table-driven check against known RGB -> HSB conversions.
        struct Test {
            rgb: RGB,
            hsb: HSB,
        };
        let tests = vec![
            Test {
                rgb: RGB { // olive
                    red: 128,
                    green: 128,
                    blue: 0,
                },
                hsb: HSB {
                    hue: 60,
                    saturation: 100,
                    brightness: 50,
                },
            },
            Test {
                rgb: RGB { // chartreuse
                    red: 127,
                    green: 255,
                    blue: 0,
                },
                hsb: HSB {
                    hue: 90,
                    saturation: 100,
                    brightness: 100,
                },
            },
        ];
        for t in tests {
            let res = rgb_to_hsv(t.rgb);
            assert_eq!(res.hue, t.hsb.hue);
            assert_eq!(res.saturation, t.hsb.saturation);
            assert_eq!(res.brightness, t.hsb.brightness);
        }
    }
}
// Named colour presets.
// NOTE(review): several brightness values (e.g. BLUE, CHARTREUSE, GREEN at
// 50) look like HSL lightness rather than HSB brightness (chartreuse in the
// test above is brightness 100) - confirm the intended colours.
pub const BEIGE: HSB = HSB {
    hue: 60,
    saturation: 56,
    brightness: 91,
};
pub const BLUE: HSB = HSB {
    hue: 240,
    saturation: 100,
    brightness: 50,
};
pub const CHARTREUSE: HSB = HSB {
    hue: 90,
    saturation: 100,
    brightness: 50,
};
pub const CORAL: HSB = HSB {
    hue: 16,
    saturation: 100,
    brightness: 66,
};
pub const CORNFLOWER: HSB = HSB {
    hue: 219,
    saturation: 79,
    brightness: 66,
};
pub const CRIMSON: HSB = HSB {
    hue: 348,
    saturation: 83,
    brightness: 47,
};
pub const DEEP_SKY_BLUE: HSB = HSB {
    hue: 195,
    saturation: 100,
    brightness: 50,
};
pub const GREEN: HSB = HSB {
    hue: 120,
    saturation: 100,
    brightness: 50,
};
pub const RED: HSB = HSB {
    hue: 0,
    saturation: 100,
    brightness: 50,
};
pub const SLATE_GRAY: HSB = HSB {
    hue: 210,
    saturation: 13,
    brightness: 50,
};
|
use std::io;
use rustc_serialize::json;
use errors::InitializationError;
/// Settings for identifying with a NickServ-style service.
/// NOTE(review): field semantics inferred from names - confirm against use.
#[deriving(RustcDecodable)]
pub struct NickServConf {
    pub name: String,
    pub command: String,
    pub account: String,
    pub password: String,
    pub enabled: bool,
}
/// Full IRC client configuration, decoded from a JSON file.
#[deriving(RustcDecodable)]
pub struct ClientConfiguration {
    pub nick: String,
    pub user: String,
    pub real_name: String,
    pub address: String,
    pub nickserv: NickServConf,
    pub channels: Vec<String>,
    pub command_prefix: String,
    pub admins: Vec<String>,
    // Raw commands to send after connecting.
    pub on_connect: Vec<String>,
}
impl ClientConfiguration {
    /// Load and JSON-decode the client configuration from `path`.
    ///
    /// Known decode failures (syntax errors, missing fields, I/O errors) are
    /// rewritten into `InitializationError`s that include the file path and
    /// position; anything else propagates through the surrounding `try!`.
    pub fn load_from_file(path: &Path) -> Result<ClientConfiguration, InitializationError> {
        let config_contents = try!(io::File::open(path).read_to_string());
        let client_config = try!(match json::decode::<ClientConfiguration>(config_contents.as_slice()) {
            Ok(v) => Ok(v),
            Err(e) => {
                match e {
                    json::DecoderError::ParseError(parse_error) => match parse_error {
                        json::ParserError::SyntaxError(error_code, line, col) => return Err(InitializationError::from_string(format!("Syntax error ({}) on line {} column {} in {}", error_code, line, col, path.display()))),
                        json::ParserError::IoError(kind, desc) => return Err(InitializationError::Io(io::IoError{ kind: kind, desc: desc, detail: None})),
                    },
                    json::DecoderError::MissingFieldError(s) => return Err(InitializationError::from_string(format!("Field {} not found in {}", s.as_slice(), path.display()))),
                    _ => Err(e),
                }
            },
        });
        return Ok(client_config);
    }
}
Clean up error handling in config.rs by matching directly on nested error patterns in a single match, instead of multiple nested match statements
use std::io;
use rustc_serialize::json;
use errors::InitializationError;
/// Settings for identifying with a NickServ-style service.
/// NOTE(review): field semantics inferred from names - confirm against use.
#[deriving(RustcDecodable)]
pub struct NickServConf {
    pub name: String,
    pub command: String,
    pub account: String,
    pub password: String,
    pub enabled: bool,
}
/// Full IRC client configuration, decoded from a JSON file.
#[deriving(RustcDecodable)]
pub struct ClientConfiguration {
    pub nick: String,
    pub user: String,
    pub real_name: String,
    pub address: String,
    pub nickserv: NickServConf,
    pub channels: Vec<String>,
    pub command_prefix: String,
    pub admins: Vec<String>,
    // Raw commands to send after connecting.
    pub on_connect: Vec<String>,
}
impl ClientConfiguration {
    /// Load and JSON-decode the client configuration from `path`.
    ///
    /// Matches directly on the nested decoder error patterns: each known
    /// failure mode is rewritten with file/position context before
    /// returning; anything else propagates through `try!` unchanged.
    pub fn load_from_file(path: &Path) -> Result<ClientConfiguration, InitializationError> {
        let config_contents = try!(io::File::open(path).read_to_string());
        return Ok(try!(match json::decode::<ClientConfiguration>(config_contents.as_slice()) {
            Err(json::DecoderError::MissingFieldError(s))
                => return Err(InitializationError::from_string(format!("Field {} not found in {}", s.as_slice(), path.display()))),
            Err(json::DecoderError::ParseError(json::ParserError::SyntaxError(error_code, line, col)))
                => return Err(InitializationError::from_string(format!("Syntax error ({}) on line {} column {} in {}", error_code, line, col, path.display()))),
            Err(json::DecoderError::ParseError(json::ParserError::IoError(kind, desc)))
                => return Err(InitializationError::Io(io::IoError{ kind: kind, desc: desc, detail: None})),
            Ok(v) => Ok(v),
            Err(e) => Err(e),
        }));
    }
}
|
//! Process configurations for Lightning sites.
// First-party
use std::collections::BTreeMap;
use std::convert::From;
use std::error::Error;
use std::fs::File;
use std::io::Read;
use std::path::PathBuf;
use std::u8;
// Third-party
use serde;
use serde_derive::Deserialize;
use serde_yaml;
// First-party
pub use crate::validated_types::Url as ValidatedUrl;
const CONFIG_FILE_NAME: &'static str = "lightning.yaml";
/// Top-level site configuration, deserialized from `lightning.yaml`.
#[derive(Debug, PartialEq, Deserialize)]
pub struct Config {
    /// Contents of the `site_info` section.
    #[serde(rename = "site_info")]
    pub site: SiteInfo,
    pub directories: Directories,
    pub structure: Structure,
}
impl Config {
    /// Load `lightning.yaml` from `directory` and parse it.
    ///
    /// Returns a human-readable error string when the file is missing,
    /// unreadable, or not valid YAML.
    pub fn from_file(directory: &PathBuf) -> Result<Config, String> {
        let config_path = directory.join(CONFIG_FILE_NAME);
        if !config_path.exists() {
            return Err(format!(
                "The specified configuration path {:} does not exist.",
                config_path.to_string_lossy()
            ));
        }
        let mut contents = String::new();
        File::open(&config_path)
            .map_err(|reason| format!("Error reading {:?}: {:?}", config_path, reason))?
            // `Error::description` is deprecated and usually uninformative;
            // use the error's `Display` form instead.
            .read_to_string(&mut contents)
            .map_err(|reason| reason.to_string())?;
        Config::parse(&contents)
    }
    /// Parse YAML `source` into a `Config`.
    fn parse(source: &str) -> Result<Config, String> {
        let config = serde_yaml::from_str(&source).map_err(|e| format!("{:}", e));
        // TODO: add some basic validation here
        config
    }
}
/// The content (input) and output directories for a build.
#[derive(Debug, PartialEq, Deserialize)]
pub struct Directories {
    pub content: PathBuf,
    pub output: PathBuf,
}
/// A way of categorising content items, selected by the `type` tag in YAML.
#[derive(Debug, PartialEq, Deserialize)]
#[serde(tag = "type")]
pub enum Taxonomy {
    /// An item either has this taxonomy or it does not (e.g. `page`).
    #[serde(rename = "binary")]
    Binary {
        /// The name of the taxonomy.
        name: String,
        /// Configuration of the templates for this taxonomy.
        templates: Templates,
        /// Whether taxonomy terms can be nested.
        hierarchical: bool,
        /// Specify whether this taxonomy should generate feeds.
        #[serde(default)]
        generate_feeds: bool,
    },
    /// A taxonomy with arbitrary terms (e.g. tags, categories).
    #[serde(rename = "multiple")]
    Multiple {
        /// The name of the taxonomy.
        name: String,
        /// Configuration of the templates for this taxonomy.
        templates: Templates,
        /// Presumably the term applied when an item gives none - confirm.
        default: Option<String>,
        /// Whether an item may be in more than one of this taxonomy term at a
        /// time, e.g. whether a blog post may belong to multiple categories.
        limit: Option<u8>,
        /// Specify whether this taxonomy is required to exist on every item.
        #[serde(default)]
        required: bool,
        /// Whether taxonomy terms can be nested.
        hierarchical: bool,
        /// Specify whether this taxonomy should generate feeds.
        #[serde(default)]
        generate_feeds: bool,
    },
    /// A date-based taxonomy (e.g. archives).
    #[serde(rename = "temporal")]
    Temporal {
        /// The name of the taxonomy.
        name: String,
        /// Configuration of the templates for this taxonomy.
        templates: Templates,
        /// Specify whether this taxonomy is required to exist on every item.
        #[serde(default)]
        required: bool,
        /// Specify whether this taxonomy should generate feeds.
        #[serde(default)]
        generate_feeds: bool,
    },
}
/// Site-wide information from the `site_info` section of the config.
#[derive(Debug, PartialEq, Deserialize)]
pub struct SiteInfo {
    /// The name of the site. Required.
    pub title: String,
    /// The canonical URL for the root of the site. Required.
    pub url: String,
    /// The description of the site. Optional.
    pub description: Option<String>,
    /// Arbitrary metadata associated with the site. Optional.
    /// (`metadata: ~` deserializes to an empty mapping via `parse_metadata`.)
    #[serde(deserialize_with = "parse_metadata")]
    pub metadata: serde_yaml::Mapping,
}
/// Deserialize an optional mapping, treating a missing/null value (`~`) as
/// an empty mapping.
fn parse_metadata<'de, D>(d: D) -> Result<serde_yaml::Mapping, D::Error>
where
    D: serde::Deserializer<'de>,
{
    // `unwrap_or_default` instead of `unwrap_or(Mapping::new())`: the
    // argument to `unwrap_or` is built eagerly even when the value is
    // present (clippy `or_fun_call`).
    serde::Deserialize::deserialize(d)
        .map(|value: Option<_>| value.unwrap_or_default())
}
/// Templates for a taxonomy: one for individual items, and optionally one
/// for term listings.
#[derive(Debug, PartialEq, Deserialize)]
pub struct Templates {
    pub item: PathBuf,
    pub list: Option<PathBuf>,
}
/// Supported feed output formats.
#[derive(Debug, PartialEq, Deserialize)]
pub enum FeedEngine {
    Atom,
    RSS,
    JSON,
}
/// Layout of the site: template directory, index template, taxonomies,
/// feed settings, and other (non-templated) content.
#[derive(Debug, PartialEq, Deserialize)]
pub struct Structure {
    directory: PathBuf,
    index: String,
    taxonomies: Vec<Taxonomy>,
    feeds: Feeds,
    other_content: OtherContent,
}
/// Which feed formats to generate, plus any extra curated feeds.
#[derive(Debug, PartialEq, Deserialize)]
pub struct Feeds {
    engines: Vec<FeedEngine>,
    additional: Vec<AdditionalFeed>,
}
/// A named feed built from a subset of taxonomy terms.
#[derive(Debug, PartialEq, Deserialize)]
pub struct AdditionalFeed {
    name: String,
    taxonomies: Vec<TaxonomySubset>,
}
/// A taxonomy name together with the terms to include.
#[derive(Debug, PartialEq, Deserialize)]
pub struct TaxonomySubset {
    taxonomy: String,
    terms: Vec<TaxonomyTerm>,
}
/// A taxonomy term: either a plain string or a number (e.g. a year).
#[derive(Debug, PartialEq, Deserialize)]
#[serde(untagged)]
pub enum TaxonomyTerm {
    String(String),
    Number(u32), // TODO: this is wrong, should be parsed as a year!
}
/// Non-templated content: paths to copy verbatim and paths to exclude.
#[derive(Debug, PartialEq, Deserialize)]
pub struct OtherContent {
    copy: Vec<PathBuf>,
    // `exclude: ~` deserializes to an empty list via `parse_exclude`.
    #[serde(deserialize_with = "parse_exclude")]
    exclude: Vec<PathBuf>,
}
/// Deserialize an optional list of paths, treating a missing/null value
/// (`~`) as an empty list.
fn parse_exclude<'de, D>(d: D) -> Result<Vec<PathBuf>, D::Error>
where
    D: serde::Deserializer<'de>,
{
    // `unwrap_or_default` instead of `unwrap_or(Vec::new())`: the argument
    // to `unwrap_or` is built eagerly even when the value is present
    // (clippy `or_fun_call`).
    serde::Deserialize::deserialize(d).map(|value: Option<_>| value.unwrap_or_default())
}
#[test]
fn parses_valid_taxonomies() {
    // One taxonomy of each `type`, exercising the optional `generate_feeds`,
    // `default`, and `limit` fields and their defaults.
    const TAXONOMIES: &'static str = "
- name: author
  type: multiple
  required: true
  hierarchical: false
  templates:
    list: authors.html
    item: author.html
  generate_feeds: true
- name: category
  type: multiple
  default: Blog
  limit: 1
  required: false
  hierarchical: false
  templates:
    list: categories.html
    item: category.html
  generate_feeds: false
- name: tag
  type: multiple
  limit: ~
  required: false
  hierarchical: false
  templates:
    list: tags.html
    item: tag.html
- name: date
  type: temporal
  required: false
  templates:
    list: period_archives.html
    item: archives.html
- name: page
  type: binary
  hierarchical: true
  templates:
    item: page.html
";
    let expected = vec![
        Taxonomy::Multiple {
            name: "author".into(),
            default: None,
            limit: None,
            required: true,
            hierarchical: false,
            templates: Templates {
                item: "author.html".into(),
                list: Some("authors.html".into()),
            },
            generate_feeds: true,
        },
        Taxonomy::Multiple {
            name: "category".into(),
            default: Some("Blog".into()),
            limit: Some(1),
            required: false,
            hierarchical: false,
            templates: Templates {
                item: "category.html".into(),
                list: Some("categories.html".into()),
            },
            generate_feeds: false,
        },
        Taxonomy::Multiple {
            name: "tag".into(),
            default: None,
            limit: None,
            required: false,
            hierarchical: false,
            templates: Templates {
                item: "tag.html".into(),
                list: Some("tags.html".into()),
            },
            generate_feeds: false,
        },
        Taxonomy::Temporal {
            name: "date".into(),
            required: false,
            templates: Templates {
                item: "archives.html".into(),
                list: Some("period_archives.html".into()),
            },
            generate_feeds: false,
        },
        Taxonomy::Binary {
            name: "page".into(),
            hierarchical: true,
            templates: Templates {
                item: "page.html".into(),
                list: None,
            },
            generate_feeds: false,
        },
    ];
    let loaded: Vec<Taxonomy> =
        serde_yaml::from_str(TAXONOMIES).expect("bad test data: TAXONOMIES");
    assert_eq!(expected, loaded);
}
#[test]
fn parses_site_info() {
    // `description` is a folded scalar (`>`), so it gains a trailing newline.
    const SITE_INFO: &'static str = "\
title: lx (lightning)
url: https://lightning.rs
description: >
  A ridiculously fast site generator and engine.
metadata:
  foo: bar
  quux: 2
";
    let mut metadata = serde_yaml::Mapping::new();
    metadata.insert("foo".into(), "bar".into());
    metadata.insert("quux".into(), 2.into());
    let expected = SiteInfo {
        title: "lx (lightning)".into(),
        url: String::from("https://lightning.rs"),
        description: Some("A ridiculously fast site generator and engine.\n".into()),
        metadata: metadata,
    };
    let loaded: SiteInfo = serde_yaml::from_str(SITE_INFO).expect("bad test data: SITE_INFO");
    assert_eq!(expected, loaded);
}
#[test]
fn parses_site_info_with_empty_metadata() {
    // `metadata: ~` must come through as an empty mapping (see
    // `parse_metadata`), not an error or a null.
    const SITE_INFO_EMPTY_METADATA: &'static str = "
title: lx (lightning)
url: https://lightning.rs
description: >
  A ridiculously fast site generator and engine.
metadata: ~
";
    let expected = SiteInfo {
        title: "lx (lightning)".into(),
        url: String::from("https://lightning.rs"),
        description: Some("A ridiculously fast site generator and engine.\n".into()),
        metadata: serde_yaml::Mapping::new(),
    };
    let loaded: SiteInfo = serde_yaml::from_str(SITE_INFO_EMPTY_METADATA)
        .expect("bad test data: SITE_INFO_EMPTY_METADATA");
    assert_eq!(expected, loaded);
}
#[test]
fn parses_default_config() {
    // End-to-end deserialization of the checked-in sample configuration.
    static DEFAULT_CONFIG: &'static str = include_str!("../tests/pure-config.yaml");
    let expected = Config {
        site: SiteInfo {
            title: "lx (lightning)".into(),
            url: "https://lightning.rs".into(),
            description: Some("A ridiculously fast site generator and engine.\n".into()),
            metadata: serde_yaml::Mapping::new(),
        },
        directories: Directories {
            content: "content".into(),
            output: "output".into(),
        },
        structure: Structure {
            directory: "layout".into(),
            index: "index.html".into(),
            taxonomies: vec![
                Taxonomy::Multiple {
                    name: "author".into(),
                    required: false,
                    hierarchical: false,
                    templates: Templates {
                        item: "author.html".into(),
                        list: Some("authors.html".into()),
                    },
                    generate_feeds: false,
                    limit: None,
                    default: None,
                },
                Taxonomy::Multiple {
                    name: "category".into(),
                    default: Some("Blog".into()),
                    limit: None,
                    required: false,
                    hierarchical: false,
                    templates: Templates {
                        item: "category.html".into(),
                        list: Some("categories.html".into()),
                    },
                    generate_feeds: false,
                },
                Taxonomy::Multiple {
                    name: "tag".into(),
                    limit: None,
                    hierarchical: false,
                    templates: Templates {
                        item: "tag.html".into(),
                        list: Some("tags.html".into()),
                    },
                    generate_feeds: false,
                    required: false,
                    default: None,
                },
                Taxonomy::Temporal {
                    name: "date".into(),
                    required: false,
                    templates: Templates {
                        item: "archives.html".into(),
                        list: Some("period_archives.html".into()),
                    },
                    generate_feeds: false,
                },
                Taxonomy::Binary {
                    name: "page".into(),
                    templates: Templates {
                        item: "page.html".into(),
                        list: None,
                    },
                    hierarchical: true,
                    generate_feeds: false,
                },
            ],
            feeds: Feeds {
                engines: vec![FeedEngine::RSS, FeedEngine::JSON],
                additional: vec![
                    AdditionalFeed {
                        name: "Art and Tech".into(),
                        taxonomies: vec![TaxonomySubset {
                            taxonomy: "categories".into(),
                            terms: vec![
                                TaxonomyTerm::String("tech".into()),
                                TaxonomyTerm::String("art".into()),
                            ],
                        }],
                    },
                    AdditionalFeed {
                        name: "2018 Family Poetry".into(),
                        taxonomies: vec![
                            TaxonomySubset {
                                taxonomy: "date".into(),
                                terms: vec![TaxonomyTerm::Number(2018)],
                            },
                            TaxonomySubset {
                                taxonomy: "tags".into(),
                                terms: vec![
                                    TaxonomyTerm::String("family".into()),
                                    TaxonomyTerm::String("poetry".into()),
                                ],
                            },
                        ],
                    },
                ],
            },
            other_content: OtherContent {
                copy: vec!["static".into(), "extra".into()],
                exclude: Vec::new(),
            },
        },
    };
    let config: Config = serde_yaml::from_str(DEFAULT_CONFIG).unwrap();
    assert_eq!(expected, config, "successfully deserialized basic config");
}
Remove an unused import from config.rs.
//! Process configurations for Lightning sites.
// First-party
use std::convert::From;
use std::error::Error;
use std::fs::File;
use std::io::Read;
use std::path::PathBuf;
use std::u8;
// Third-party
use serde;
use serde_derive::Deserialize;
use serde_yaml;
// First-party
pub use crate::validated_types::Url as ValidatedUrl;
const CONFIG_FILE_NAME: &'static str = "lightning.yaml";
/// Top-level site configuration, deserialized from `lightning.yaml`.
#[derive(Debug, PartialEq, Deserialize)]
pub struct Config {
    /// Contents of the `site_info` section.
    #[serde(rename = "site_info")]
    pub site: SiteInfo,
    pub directories: Directories,
    pub structure: Structure,
}
impl Config {
    /// Load `lightning.yaml` from `directory` and parse it.
    ///
    /// Returns a human-readable error string when the file is missing,
    /// unreadable, or not valid YAML.
    pub fn from_file(directory: &PathBuf) -> Result<Config, String> {
        let config_path = directory.join(CONFIG_FILE_NAME);
        if !config_path.exists() {
            return Err(format!(
                "The specified configuration path {:} does not exist.",
                config_path.to_string_lossy()
            ));
        }
        let mut contents = String::new();
        File::open(&config_path)
            .map_err(|reason| format!("Error reading {:?}: {:?}", config_path, reason))?
            // `Error::description` is deprecated and usually uninformative;
            // use the error's `Display` form instead.
            .read_to_string(&mut contents)
            .map_err(|reason| reason.to_string())?;
        Config::parse(&contents)
    }
    /// Parse YAML `source` into a `Config`.
    fn parse(source: &str) -> Result<Config, String> {
        let config = serde_yaml::from_str(&source).map_err(|e| format!("{:}", e));
        // TODO: add some basic validation here
        config
    }
}
/// Input and output locations for the site build.
#[derive(Debug, PartialEq, Deserialize)]
pub struct Directories {
    pub content: PathBuf,
    pub output: PathBuf,
}

/// A way of categorizing content, tagged by `type` in the YAML source.
#[derive(Debug, PartialEq, Deserialize)]
#[serde(tag = "type")]
pub enum Taxonomy {
    #[serde(rename = "binary")]
    Binary {
        /// The name of the taxonomy.
        name: String,
        /// Configuration of the templates for this taxonomy.
        templates: Templates,
        /// Whether taxonomy terms can be nested.
        hierarchical: bool,
        /// Specify whether this taxonomy should generate feeds.
        #[serde(default)]
        generate_feeds: bool,
    },
    #[serde(rename = "multiple")]
    Multiple {
        /// The name of the taxonomy.
        name: String,
        /// Configuration of the templates for this taxonomy.
        templates: Templates,
        /// Default term applied when an item specifies none. Optional.
        default: Option<String>,
        /// How many terms from this taxonomy an item may carry at once,
        /// e.g. whether a blog post may belong to multiple categories.
        /// NOTE(review): presumably `None` means unlimited — confirm at use sites.
        limit: Option<u8>,
        /// Specify whether this taxonomy is required to exist on every item.
        #[serde(default)]
        required: bool,
        /// Whether taxonomy terms can be nested.
        hierarchical: bool,
        /// Specify whether this taxonomy should generate feeds.
        #[serde(default)]
        generate_feeds: bool,
    },
    #[serde(rename = "temporal")]
    Temporal {
        /// The name of the taxonomy.
        name: String,
        /// Configuration of the templates for this taxonomy.
        templates: Templates,
        /// Specify whether this taxonomy is required to exist on every item.
        #[serde(default)]
        required: bool,
        /// Specify whether this taxonomy should generate feeds.
        #[serde(default)]
        generate_feeds: bool,
    },
}

/// Site-wide information used across the generated site.
#[derive(Debug, PartialEq, Deserialize)]
pub struct SiteInfo {
    /// The name of the site. Required.
    pub title: String,
    /// The canonical URL for the root of the site. Required.
    pub url: String,
    /// The description of the site. Optional.
    pub description: Option<String>,
    /// Arbitrary metadata associated with the site. Optional.
    #[serde(deserialize_with = "parse_metadata")]
    pub metadata: serde_yaml::Mapping,
}
/// Deserialize the optional `metadata` mapping, treating a missing or null
/// value (`metadata: ~`) as an empty mapping.
fn parse_metadata<'de, D>(d: D) -> Result<serde_yaml::Mapping, D::Error>
where
    D: serde::Deserializer<'de>,
{
    // `unwrap_or_default()` avoids eagerly constructing the fallback mapping
    // on every call (clippy: `or_fun_call`).
    serde::Deserialize::deserialize(d).map(|value: Option<_>| value.unwrap_or_default())
}
/// Templates used to render a taxonomy: one for individual items and an
/// optional one for term listings.
#[derive(Debug, PartialEq, Deserialize)]
pub struct Templates {
    pub item: PathBuf,
    pub list: Option<PathBuf>,
}

/// Feed formats the site can emit.
#[derive(Debug, PartialEq, Deserialize)]
pub enum FeedEngine {
    Atom,
    RSS,
    JSON,
}

/// Layout of the site: template directory, index template, taxonomies,
/// feed configuration, and non-templated content.
#[derive(Debug, PartialEq, Deserialize)]
pub struct Structure {
    directory: PathBuf,
    index: String,
    taxonomies: Vec<Taxonomy>,
    feeds: Feeds,
    other_content: OtherContent,
}

/// Feed generation settings: which engines to use, plus any extra feeds.
#[derive(Debug, PartialEq, Deserialize)]
pub struct Feeds {
    engines: Vec<FeedEngine>,
    additional: Vec<AdditionalFeed>,
}

/// A named feed built from one or more taxonomy subsets.
#[derive(Debug, PartialEq, Deserialize)]
pub struct AdditionalFeed {
    name: String,
    taxonomies: Vec<TaxonomySubset>,
}

/// A selection of terms drawn from a single taxonomy.
#[derive(Debug, PartialEq, Deserialize)]
pub struct TaxonomySubset {
    taxonomy: String,
    terms: Vec<TaxonomyTerm>,
}

/// A taxonomy term, either textual (e.g. a tag) or numeric (e.g. a year).
#[derive(Debug, PartialEq, Deserialize)]
#[serde(untagged)]
pub enum TaxonomyTerm {
    String(String),
    Number(u32), // TODO: this is wrong, should be parsed as a year!
}

/// Non-templated content: directories copied verbatim and paths excluded.
#[derive(Debug, PartialEq, Deserialize)]
pub struct OtherContent {
    copy: Vec<PathBuf>,
    #[serde(deserialize_with = "parse_exclude")]
    exclude: Vec<PathBuf>,
}
/// Deserialize the optional `exclude` list, treating a missing or null value
/// as an empty list.
fn parse_exclude<'de, D>(d: D) -> Result<Vec<PathBuf>, D::Error>
where
    D: serde::Deserializer<'de>,
{
    // `unwrap_or_default()` avoids eagerly constructing the fallback vector
    // on every call (clippy: `or_fun_call`).
    serde::Deserialize::deserialize(d).map(|value: Option<_>| value.unwrap_or_default())
}
// NOTE(review): SOURCE indentation was stripped; the YAML literal indentation
// below was reconstructed from the expected values — confirm against fixtures.
#[test]
fn parses_valid_taxonomies() {
    // One entry per `Taxonomy` variant, exercising the optional fields.
    const TAXONOMIES: &'static str = "
- name: author
  type: multiple
  required: true
  hierarchical: false
  templates:
    list: authors.html
    item: author.html
  generate_feeds: true
- name: category
  type: multiple
  default: Blog
  limit: 1
  required: false
  hierarchical: false
  templates:
    list: categories.html
    item: category.html
  generate_feeds: false
- name: tag
  type: multiple
  limit: ~
  required: false
  hierarchical: false
  templates:
    list: tags.html
    item: tag.html
- name: date
  type: temporal
  required: false
  templates:
    list: period_archives.html
    item: archives.html
- name: page
  type: binary
  hierarchical: true
  templates:
    item: page.html
";

    let expected = vec![
        Taxonomy::Multiple {
            name: "author".into(),
            default: None,
            limit: None,
            required: true,
            hierarchical: false,
            templates: Templates {
                item: "author.html".into(),
                list: Some("authors.html".into()),
            },
            generate_feeds: true,
        },
        Taxonomy::Multiple {
            name: "category".into(),
            default: Some("Blog".into()),
            limit: Some(1),
            required: false,
            hierarchical: false,
            templates: Templates {
                item: "category.html".into(),
                list: Some("categories.html".into()),
            },
            generate_feeds: false,
        },
        Taxonomy::Multiple {
            name: "tag".into(),
            default: None,
            limit: None,
            required: false,
            hierarchical: false,
            templates: Templates {
                item: "tag.html".into(),
                list: Some("tags.html".into()),
            },
            // Omitted in the YAML above; `#[serde(default)]` fills it in.
            generate_feeds: false,
        },
        Taxonomy::Temporal {
            name: "date".into(),
            required: false,
            templates: Templates {
                item: "archives.html".into(),
                list: Some("period_archives.html".into()),
            },
            generate_feeds: false,
        },
        Taxonomy::Binary {
            name: "page".into(),
            hierarchical: true,
            templates: Templates {
                item: "page.html".into(),
                list: None,
            },
            generate_feeds: false,
        },
    ];

    let loaded: Vec<Taxonomy> =
        serde_yaml::from_str(TAXONOMIES).expect("bad test data: TAXONOMIES");

    assert_eq!(expected, loaded);
}

// Checks that a populated `metadata` mapping round-trips through serde.
#[test]
fn parses_site_info() {
    const SITE_INFO: &'static str = "\
title: lx (lightning)
url: https://lightning.rs
description: >
  A ridiculously fast site generator and engine.
metadata:
  foo: bar
  quux: 2
";

    let mut metadata = serde_yaml::Mapping::new();
    metadata.insert("foo".into(), "bar".into());
    metadata.insert("quux".into(), 2.into());
    let expected = SiteInfo {
        title: "lx (lightning)".into(),
        url: String::from("https://lightning.rs"),
        description: Some("A ridiculously fast site generator and engine.\n".into()),
        metadata: metadata,
    };

    let loaded: SiteInfo = serde_yaml::from_str(SITE_INFO).expect("bad test data: SITE_INFO");

    assert_eq!(expected, loaded);
}

// Checks that `metadata: ~` (explicit null) becomes an empty mapping.
#[test]
fn parses_site_info_with_empty_metadata() {
    const SITE_INFO_EMPTY_METADATA: &'static str = "
title: lx (lightning)
url: https://lightning.rs
description: >
  A ridiculously fast site generator and engine.
metadata: ~
";

    let expected = SiteInfo {
        title: "lx (lightning)".into(),
        url: String::from("https://lightning.rs"),
        description: Some("A ridiculously fast site generator and engine.\n".into()),
        metadata: serde_yaml::Mapping::new(),
    };

    let loaded: SiteInfo = serde_yaml::from_str(SITE_INFO_EMPTY_METADATA)
        .expect("bad test data: SITE_INFO_EMPTY_METADATA");

    assert_eq!(expected, loaded);
}

// Full-file smoke test against the checked-in default configuration.
#[test]
fn parses_default_config() {
    static DEFAULT_CONFIG: &'static str = include_str!("../tests/pure-config.yaml");

    let expected = Config {
        site: SiteInfo {
            title: "lx (lightning)".into(),
            url: "https://lightning.rs".into(),
            description: Some("A ridiculously fast site generator and engine.\n".into()),
            metadata: serde_yaml::Mapping::new(),
        },
        directories: Directories {
            content: "content".into(),
            output: "output".into(),
        },
        structure: Structure {
            directory: "layout".into(),
            index: "index.html".into(),
            taxonomies: vec![
                Taxonomy::Multiple {
                    name: "author".into(),
                    required: false,
                    hierarchical: false,
                    templates: Templates {
                        item: "author.html".into(),
                        list: Some("authors.html".into()),
                    },
                    generate_feeds: false,
                    limit: None,
                    default: None,
                },
                Taxonomy::Multiple {
                    name: "category".into(),
                    default: Some("Blog".into()),
                    limit: None,
                    required: false,
                    hierarchical: false,
                    templates: Templates {
                        item: "category.html".into(),
                        list: Some("categories.html".into()),
                    },
                    generate_feeds: false,
                },
                Taxonomy::Multiple {
                    name: "tag".into(),
                    limit: None,
                    hierarchical: false,
                    templates: Templates {
                        item: "tag.html".into(),
                        list: Some("tags.html".into()),
                    },
                    generate_feeds: false,
                    required: false,
                    default: None,
                },
                Taxonomy::Temporal {
                    name: "date".into(),
                    required: false,
                    templates: Templates {
                        item: "archives.html".into(),
                        list: Some("period_archives.html".into()),
                    },
                    generate_feeds: false,
                },
                Taxonomy::Binary {
                    name: "page".into(),
                    templates: Templates {
                        item: "page.html".into(),
                        list: None,
                    },
                    hierarchical: true,
                    generate_feeds: false,
                },
            ],
            feeds: Feeds {
                engines: vec![FeedEngine::RSS, FeedEngine::JSON],
                additional: vec![
                    AdditionalFeed {
                        name: "Art and Tech".into(),
                        taxonomies: vec![TaxonomySubset {
                            taxonomy: "categories".into(),
                            terms: vec![
                                TaxonomyTerm::String("tech".into()),
                                TaxonomyTerm::String("art".into()),
                            ],
                        }],
                    },
                    AdditionalFeed {
                        name: "2018 Family Poetry".into(),
                        taxonomies: vec![
                            TaxonomySubset {
                                taxonomy: "date".into(),
                                terms: vec![TaxonomyTerm::Number(2018)],
                            },
                            TaxonomySubset {
                                taxonomy: "tags".into(),
                                terms: vec![
                                    TaxonomyTerm::String("family".into()),
                                    TaxonomyTerm::String("poetry".into()),
                                ],
                            },
                        ],
                    },
                ],
            },
            other_content: OtherContent {
                copy: vec!["static".into(), "extra".into()],
                exclude: Vec::new(),
            },
        },
    };

    let config: Config = serde_yaml::from_str(DEFAULT_CONFIG).unwrap();
    assert_eq!(expected, config, "successfully deserialized basic config");
}
|
//! Process configurations for Lightning sites.
// First-party
use std::convert::From;
use std::error::Error;
use std::fs::File;
use std::io::Read;
use std::path::PathBuf;
use std::u8;
// Third-party
use serde;
use serde_derive::Deserialize;
use serde_yaml;
// First-party
pub use crate::validated_types::Url as ValidatedUrl;
/// Name of the configuration file looked for in the site directory.
const CONFIG_FILE_NAME: &'static str = "lightning.yaml";

/// Top-level site configuration, deserialized from `lightning.yaml`.
#[derive(Debug, PartialEq, Deserialize)]
pub struct Config {
    pub site: SiteInfo,
    pub directories: Directories,
    pub taxonomies: Vec<Taxonomy>,
}
impl Config {
    /// Load the site configuration from `lightning.yaml` inside `directory`.
    ///
    /// Returns a human-readable error string if the file is missing,
    /// unreadable, or fails to parse.
    pub fn from_file(directory: &PathBuf) -> Result<Config, String> {
        let config_path = directory.join(CONFIG_FILE_NAME);
        if !config_path.exists() {
            return Err(format!(
                "The specified configuration path {:} does not exist.",
                config_path.to_string_lossy()
            ));
        }

        let mut contents = String::new();
        File::open(&config_path)
            .map_err(|reason| format!("Error reading {:?}: {:?}", config_path, reason))?
            .read_to_string(&mut contents)
            // `Error::description()` is deprecated and typically uninformative;
            // `to_string()` goes through `Display` and keeps the OS detail.
            .map_err(|reason| reason.to_string())?;

        Config::parse(&contents)
    }

    /// Parse YAML `source` into a `Config`, stringifying serde errors.
    fn parse(source: &str) -> Result<Config, String> {
        // TODO: add some basic validation here
        serde_yaml::from_str(source).map_err(|e| format!("{}", e))
    }
}
/// Input, output, and template locations for the site build.
#[derive(Debug, PartialEq, Deserialize)]
pub struct Directories {
    pub content: PathBuf,
    pub output: PathBuf,
    pub template: PathBuf,
}

/// A way of categorizing content, tagged by `type` in the YAML source.
#[derive(Debug, PartialEq, Deserialize)]
#[serde(tag = "type")]
pub enum Taxonomy {
    #[serde(rename = "binary")]
    Binary {
        /// The name of the taxonomy.
        name: String,
        /// Templates used to render this taxonomy.
        templates: Templates,
        /// Whether taxonomy terms can be nested.
        hierarchical: bool,
        /// The feed types associated with the taxonomy
        #[serde(default)]
        feeds: Vec<Feed>,
    },
    #[serde(rename = "multiple")]
    Multiple {
        /// The name of the taxonomy.
        name: String,
        /// Templates used to render this taxonomy.
        templates: Templates,
        /// Default term applied when an item specifies none. Optional.
        default: Option<String>,
        /// How many terms from this taxonomy an item may carry at once.
        /// NOTE(review): presumably `None` means unlimited — confirm at use sites.
        limit: Option<u8>,
        /// Whether this taxonomy is required to exist on every item.
        required: bool,
        /// Whether taxonomy terms can be nested.
        hierarchical: bool,
        /// The feed types associated with the taxonomy
        #[serde(default)]
        feeds: Vec<Feed>,
    },
    #[serde(rename = "temporal")]
    Temporal {
        /// The name of the taxonomy.
        name: String,
        /// Templates used to render this taxonomy.
        templates: Templates,
        /// Whether this taxonomy is required to exist on every item.
        required: bool,
        /// The feed types associated with the taxonomy
        #[serde(default)]
        feeds: Vec<Feed>,
    },
}

/// Site-wide information used across the generated site.
#[derive(Debug, PartialEq, Deserialize)]
pub struct SiteInfo {
    /// The name of the site. Required.
    pub title: String,
    /// The canonical URL for the root of the site. Required.
    pub url: String,
    /// The description of the site. Optional.
    pub description: Option<String>,
    /// Arbitrary metadata associated with the site. Optional.
    #[serde(deserialize_with = "parse_metadata")]
    pub metadata: serde_yaml::Mapping,
}
/// Deserialize the optional `metadata` mapping, treating a missing or null
/// value (`metadata: ~`) as an empty mapping.
fn parse_metadata<'de, D>(d: D) -> Result<serde_yaml::Mapping, D::Error>
where
    D: serde::Deserializer<'de>,
{
    // `unwrap_or_default()` avoids eagerly constructing the fallback mapping
    // on every call (clippy: `or_fun_call`).
    serde::Deserialize::deserialize(d).map(|value: Option<_>| value.unwrap_or_default())
}
/// Templates used to render a taxonomy: one for individual items and an
/// optional one for term listings.
#[derive(Debug, PartialEq, Deserialize)]
pub struct Templates {
    pub item: PathBuf,
    pub list: Option<PathBuf>,
}

/// Feed formats that can be generated for a taxonomy.
#[derive(Debug, PartialEq, Deserialize)]
pub enum Feed {
    Atom,
    RSS,
    JSON,
}
// NOTE(review): SOURCE indentation was stripped; the YAML literal indentation
// below was reconstructed from the expected values — confirm against fixtures.
#[test]
fn parses_valid_taxonomies() {
    // No `feeds` key anywhere: `#[serde(default)]` must yield empty vectors.
    const TAXONOMIES: &'static str = "
- name: author
  type: multiple
  required: true
  hierarchical: false
  templates:
    list: authors.html
    item: author.html
- name: category
  type: multiple
  default: Blog
  limit: 1
  required: false
  hierarchical: false
  templates:
    list: categories.html
    item: category.html
- name: tag
  type: multiple
  limit: ~
  required: false
  hierarchical: false
  templates:
    list: tags.html
    item: tag.html
- name: date
  type: temporal
  required: false
  templates:
    list: period_archives.html
    item: archives.html
- name: page
  type: binary
  hierarchical: true
  templates:
    item: page.html
";

    let expected = vec![
        Taxonomy::Multiple {
            name: "author".into(),
            default: None,
            limit: None,
            required: true,
            hierarchical: false,
            templates: Templates {
                item: "author.html".into(),
                list: Some("authors.html".into()),
            },
            feeds: vec![],
        },
        Taxonomy::Multiple {
            name: "category".into(),
            default: Some("Blog".into()),
            limit: Some(1),
            required: false,
            hierarchical: false,
            templates: Templates {
                item: "category.html".into(),
                list: Some("categories.html".into()),
            },
            feeds: vec![],
        },
        Taxonomy::Multiple {
            name: "tag".into(),
            default: None,
            limit: None,
            required: false,
            hierarchical: false,
            templates: Templates {
                item: "tag.html".into(),
                list: Some("tags.html".into()),
            },
            feeds: vec![],
        },
        Taxonomy::Temporal {
            name: "date".into(),
            required: false,
            templates: Templates {
                item: "archives.html".into(),
                list: Some("period_archives.html".into()),
            },
            feeds: vec![],
        },
        Taxonomy::Binary {
            name: "page".into(),
            hierarchical: true,
            templates: Templates {
                item: "page.html".into(),
                list: None,
            },
            feeds: vec![],
        },
    ];

    let loaded: Vec<Taxonomy> =
        serde_yaml::from_str(TAXONOMIES).expect("bad test data: TAXONOMIES");

    assert_eq!(expected, loaded);
}

// Checks that a populated `metadata` mapping round-trips through serde.
#[test]
fn parses_site_info() {
    const SITE_INFO: &'static str = "\
title: lx (lightning)
url: https://lightning.rs
description: >
  A ridiculously fast site generator and engine.
metadata:
  foo: bar
  quux: 2
";

    let mut metadata = serde_yaml::Mapping::new();
    metadata.insert("foo".into(), "bar".into());
    metadata.insert("quux".into(), 2.into());
    let expected = SiteInfo {
        title: "lx (lightning)".into(),
        url: String::from("https://lightning.rs"),
        description: Some("A ridiculously fast site generator and engine.\n".into()),
        metadata: metadata,
    };

    let loaded: SiteInfo = serde_yaml::from_str(SITE_INFO).expect("bad test data: SITE_INFO");

    assert_eq!(expected, loaded);
}

// Checks that `metadata: ~` (explicit null) becomes an empty mapping.
#[test]
fn parses_site_info_with_empty_metadata() {
    const SITE_INFO_EMPTY_METADATA: &'static str = "
title: lx (lightning)
url: https://lightning.rs
description: >
  A ridiculously fast site generator and engine.
metadata: ~
";

    let expected = SiteInfo {
        title: "lx (lightning)".into(),
        url: String::from("https://lightning.rs"),
        description: Some("A ridiculously fast site generator and engine.\n".into()),
        metadata: serde_yaml::Mapping::new(),
    };

    let loaded: SiteInfo = serde_yaml::from_str(SITE_INFO_EMPTY_METADATA)
        .expect("bad test data: SITE_INFO_EMPTY_METADATA");

    assert_eq!(expected, loaded);
}
Include `feeds` for taxonomies in tests.
//! Process configurations for Lightning sites.
// First-party
use std::convert::From;
use std::error::Error;
use std::fs::File;
use std::io::Read;
use std::path::PathBuf;
use std::u8;
// Third-party
use serde;
use serde_derive::Deserialize;
use serde_yaml;
// First-party
pub use crate::validated_types::Url as ValidatedUrl;
/// Name of the configuration file looked for in the site directory.
const CONFIG_FILE_NAME: &'static str = "lightning.yaml";

/// Top-level site configuration, deserialized from `lightning.yaml`.
#[derive(Debug, PartialEq, Deserialize)]
pub struct Config {
    pub site: SiteInfo,
    pub directories: Directories,
    pub taxonomies: Vec<Taxonomy>,
}
impl Config {
    /// Load the site configuration from `lightning.yaml` inside `directory`.
    ///
    /// Returns a human-readable error string if the file is missing,
    /// unreadable, or fails to parse.
    pub fn from_file(directory: &PathBuf) -> Result<Config, String> {
        let config_path = directory.join(CONFIG_FILE_NAME);
        if !config_path.exists() {
            return Err(format!(
                "The specified configuration path {:} does not exist.",
                config_path.to_string_lossy()
            ));
        }

        let mut contents = String::new();
        File::open(&config_path)
            .map_err(|reason| format!("Error reading {:?}: {:?}", config_path, reason))?
            .read_to_string(&mut contents)
            // `Error::description()` is deprecated and typically uninformative;
            // `to_string()` goes through `Display` and keeps the OS detail.
            .map_err(|reason| reason.to_string())?;

        Config::parse(&contents)
    }

    /// Parse YAML `source` into a `Config`, stringifying serde errors.
    fn parse(source: &str) -> Result<Config, String> {
        // TODO: add some basic validation here
        serde_yaml::from_str(source).map_err(|e| format!("{}", e))
    }
}
/// Input, output, and template locations for the site build.
#[derive(Debug, PartialEq, Deserialize)]
pub struct Directories {
    pub content: PathBuf,
    pub output: PathBuf,
    pub template: PathBuf,
}

/// A way of categorizing content, tagged by `type` in the YAML source.
#[derive(Debug, PartialEq, Deserialize)]
#[serde(tag = "type")]
pub enum Taxonomy {
    #[serde(rename = "binary")]
    Binary {
        /// The name of the taxonomy.
        name: String,
        /// Templates used to render this taxonomy.
        templates: Templates,
        /// Whether taxonomy terms can be nested.
        hierarchical: bool,
        /// The feed types associated with the taxonomy
        #[serde(default)]
        feeds: Vec<Feed>,
    },
    #[serde(rename = "multiple")]
    Multiple {
        /// The name of the taxonomy.
        name: String,
        /// Templates used to render this taxonomy.
        templates: Templates,
        /// Default term applied when an item specifies none. Optional.
        default: Option<String>,
        /// How many terms from this taxonomy an item may carry at once.
        /// NOTE(review): presumably `None` means unlimited — confirm at use sites.
        limit: Option<u8>,
        /// Whether this taxonomy is required to exist on every item.
        required: bool,
        /// Whether taxonomy terms can be nested.
        hierarchical: bool,
        /// The feed types associated with the taxonomy
        #[serde(default)]
        feeds: Vec<Feed>,
    },
    #[serde(rename = "temporal")]
    Temporal {
        /// The name of the taxonomy.
        name: String,
        /// Templates used to render this taxonomy.
        templates: Templates,
        /// Whether this taxonomy is required to exist on every item.
        required: bool,
        /// The feed types associated with the taxonomy
        #[serde(default)]
        feeds: Vec<Feed>,
    },
}

/// Site-wide information used across the generated site.
#[derive(Debug, PartialEq, Deserialize)]
pub struct SiteInfo {
    /// The name of the site. Required.
    pub title: String,
    /// The canonical URL for the root of the site. Required.
    pub url: String,
    /// The description of the site. Optional.
    pub description: Option<String>,
    /// Arbitrary metadata associated with the site. Optional.
    #[serde(deserialize_with = "parse_metadata")]
    pub metadata: serde_yaml::Mapping,
}
/// Deserialize the optional `metadata` mapping, treating a missing or null
/// value (`metadata: ~`) as an empty mapping.
fn parse_metadata<'de, D>(d: D) -> Result<serde_yaml::Mapping, D::Error>
where
    D: serde::Deserializer<'de>,
{
    // `unwrap_or_default()` avoids eagerly constructing the fallback mapping
    // on every call (clippy: `or_fun_call`).
    serde::Deserialize::deserialize(d).map(|value: Option<_>| value.unwrap_or_default())
}
/// Templates used to render a taxonomy: one for individual items and an
/// optional one for term listings.
#[derive(Debug, PartialEq, Deserialize)]
pub struct Templates {
    pub item: PathBuf,
    pub list: Option<PathBuf>,
}

/// Feed formats that can be generated for a taxonomy.
#[derive(Debug, PartialEq, Deserialize)]
pub enum Feed {
    Atom,
    RSS,
    JSON,
}
// NOTE(review): SOURCE indentation was stripped; the YAML literal indentation
// below was reconstructed from the expected values — confirm against fixtures.
#[test]
fn parses_valid_taxonomies() {
    // Each taxonomy carries an explicit `feeds` list except `page`, which
    // exercises the `#[serde(default)]` fallback.
    const TAXONOMIES: &'static str = "
- name: author
  type: multiple
  required: true
  hierarchical: false
  templates:
    list: authors.html
    item: author.html
  feeds:
    - RSS
- name: category
  type: multiple
  default: Blog
  limit: 1
  required: false
  hierarchical: false
  templates:
    list: categories.html
    item: category.html
  feeds:
    - Atom
- name: tag
  type: multiple
  limit: ~
  required: false
  hierarchical: false
  templates:
    list: tags.html
    item: tag.html
  feeds:
    - JSON
- name: date
  type: temporal
  required: false
  templates:
    list: period_archives.html
    item: archives.html
  feeds:
    - RSS
    - Atom
    - JSON
- name: page
  type: binary
  hierarchical: true
  templates:
    item: page.html
";

    let expected = vec![
        Taxonomy::Multiple {
            name: "author".into(),
            default: None,
            limit: None,
            required: true,
            hierarchical: false,
            templates: Templates {
                item: "author.html".into(),
                list: Some("authors.html".into()),
            },
            feeds: vec![Feed::RSS],
        },
        Taxonomy::Multiple {
            name: "category".into(),
            default: Some("Blog".into()),
            limit: Some(1),
            required: false,
            hierarchical: false,
            templates: Templates {
                item: "category.html".into(),
                list: Some("categories.html".into()),
            },
            feeds: vec![Feed::Atom],
        },
        Taxonomy::Multiple {
            name: "tag".into(),
            default: None,
            limit: None,
            required: false,
            hierarchical: false,
            templates: Templates {
                item: "tag.html".into(),
                list: Some("tags.html".into()),
            },
            feeds: vec![Feed::JSON],
        },
        Taxonomy::Temporal {
            name: "date".into(),
            required: false,
            templates: Templates {
                item: "archives.html".into(),
                list: Some("period_archives.html".into()),
            },
            feeds: vec![Feed::RSS, Feed::Atom, Feed::JSON],
        },
        Taxonomy::Binary {
            name: "page".into(),
            hierarchical: true,
            templates: Templates {
                item: "page.html".into(),
                list: None,
            },
            feeds: vec![],
        },
    ];

    let loaded: Vec<Taxonomy> =
        serde_yaml::from_str(TAXONOMIES).expect("bad test data: TAXONOMIES");

    assert_eq!(expected, loaded);
}

// Checks that a populated `metadata` mapping round-trips through serde.
#[test]
fn parses_site_info() {
    const SITE_INFO: &'static str = "\
title: lx (lightning)
url: https://lightning.rs
description: >
  A ridiculously fast site generator and engine.
metadata:
  foo: bar
  quux: 2
";

    let mut metadata = serde_yaml::Mapping::new();
    metadata.insert("foo".into(), "bar".into());
    metadata.insert("quux".into(), 2.into());
    let expected = SiteInfo {
        title: "lx (lightning)".into(),
        url: String::from("https://lightning.rs"),
        description: Some("A ridiculously fast site generator and engine.\n".into()),
        metadata: metadata,
    };

    let loaded: SiteInfo = serde_yaml::from_str(SITE_INFO).expect("bad test data: SITE_INFO");

    assert_eq!(expected, loaded);
}

// Checks that `metadata: ~` (explicit null) becomes an empty mapping.
#[test]
fn parses_site_info_with_empty_metadata() {
    const SITE_INFO_EMPTY_METADATA: &'static str = "
title: lx (lightning)
url: https://lightning.rs
description: >
  A ridiculously fast site generator and engine.
metadata: ~
";

    let expected = SiteInfo {
        title: "lx (lightning)".into(),
        url: String::from("https://lightning.rs"),
        description: Some("A ridiculously fast site generator and engine.\n".into()),
        metadata: serde_yaml::Mapping::new(),
    };

    let loaded: SiteInfo = serde_yaml::from_str(SITE_INFO_EMPTY_METADATA)
        .expect("bad test data: SITE_INFO_EMPTY_METADATA");

    assert_eq!(expected, loaded);
}
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate toml;
use std::cell::Cell;
use std::fs;
use std::fs::File;
use std::env;
use std::io::{Error, ErrorKind, Read};
use std::path::{Path, PathBuf};
use file_lines::FileLines;
use lists::{SeparatorTactic, ListTactic};
// Declares a copyable config-option enum and wires up its serde support via
// `impl_enum_serialize_and_deserialize!` (defined elsewhere in the crate).
macro_rules! configuration_option_enum{
    ($e:ident: $( $x:ident ),+ $(,)*) => {
        #[derive(Copy, Clone, Eq, PartialEq, Debug)]
        pub enum $e {
            $( $x ),+
        }

        impl_enum_serialize_and_deserialize!($e, $( $x ),+);
    }
}
configuration_option_enum! { Style:
    Rfc, // Follow the style RFCs style.
    Legacy, // Follow the traditional Rustfmt style.
}

configuration_option_enum! { NewlineStyle:
    Windows, // \r\n
    Unix, // \n
    Native, // \r\n in Windows, \n on other platforms
}

configuration_option_enum! { BraceStyle:
    AlwaysNextLine,
    PreferSameLine,
    // Prefer same line except where there is a where clause, in which case force
    // the brace to the next line.
    SameLineWhere,
}

configuration_option_enum! { ControlBraceStyle:
    // K&R style, Rust community default
    AlwaysSameLine,
    // Stroustrup style
    ClosingNextLine,
    // Allman style
    AlwaysNextLine,
}

// How to indent a function's return type.
configuration_option_enum! { ReturnIndent:
    // Aligned with the arguments
    WithArgs,
    // Aligned with the where clause
    WithWhereClause,
}

configuration_option_enum! { IndentStyle:
    // First line on the same line as the opening brace, all lines aligned with
    // the first line.
    Visual,
    // First line is on a new line and all lines align with block indent.
    Block,
}

configuration_option_enum! { Density:
    // Fit as much on one line as possible.
    Compressed,
    // Use more lines.
    Tall,
    // Try to compress if the body is empty.
    CompressedIfEmpty,
    // Place every item on a separate line.
    Vertical,
}

configuration_option_enum! { TypeDensity:
    // No spaces around "=" and "+"
    Compressed,
    // Spaces around " = " and " + "
    Wide,
}
impl Density {
    /// Map a layout density onto the tactic used when formatting lists.
    pub fn to_list_tactic(self) -> ListTactic {
        match self {
            Density::Compressed => ListTactic::Mixed,
            Density::Tall |
            Density::CompressedIfEmpty => ListTactic::HorizontalVertical,
            Density::Vertical => ListTactic::Vertical,
        }
    }
}
configuration_option_enum! { LicensePolicy:
    // Do not place license text at top of files
    NoLicense,
    // Use the text in "license" field as the license
    TextLicense,
    // Use a text file as the license text
    FileLicense,
}

configuration_option_enum! { MultilineStyle:
    // Use horizontal layout if it fits in one line, fall back to vertical
    PreferSingle,
    // Use vertical layout
    ForceMulti,
}
impl MultilineStyle {
    /// Map this style onto the tactic used when formatting lists.
    pub fn to_list_tactic(self) -> ListTactic {
        match self {
            MultilineStyle::PreferSingle => ListTactic::HorizontalVertical,
            MultilineStyle::ForceMulti => ListTactic::Vertical,
        }
    }
}
// How often to report an occurrence; `Unnumbered` presumably suppresses the
// issue number — NOTE(review): confirm at the option's use sites.
configuration_option_enum! { ReportTactic:
    Always,
    Unnumbered,
    Never,
}

// What rustfmt does with its formatted output.
configuration_option_enum! { WriteMode:
    // Backs the original file up and overwrites the original.
    Replace,
    // Overwrites original file without backup.
    Overwrite,
    // Writes the output to stdout.
    Display,
    // Writes the diff to stdout.
    Diff,
    // Displays how much of the input file was processed
    Coverage,
    // Unfancy stdout
    Plain,
    // Outputs a checkstyle XML file.
    Checkstyle,
}
/// Trait for types that can be used in `Config`.
pub trait ConfigType: Sized {
    /// Returns hint text for use in `Config::print_docs()`. For enum types, this is a
    /// pipe-separated list of variants; for other types it returns "<type>".
    fn doc_hint() -> String;
}

impl ConfigType for bool {
    fn doc_hint() -> String {
        String::from("<boolean>")
    }
}

impl ConfigType for usize {
    fn doc_hint() -> String {
        String::from("<unsigned integer>")
    }
}

impl ConfigType for isize {
    fn doc_hint() -> String {
        String::from("<signed integer>")
    }
}

impl ConfigType for String {
    fn doc_hint() -> String {
        String::from("<string>")
    }
}

impl ConfigType for FileLines {
    fn doc_hint() -> String {
        String::from("<json>")
    }
}
/// Documentation for a single configuration option.
pub struct ConfigHelpItem {
    // Name of the option as written in the config file.
    option_name: &'static str,
    // Human-readable description of the option.
    doc_string: &'static str,
    // Pipe-separated variant names (for enum-typed options).
    variant_names: String,
    // Textual form of the default value.
    default: &'static str,
}

impl ConfigHelpItem {
    pub fn option_name(&self) -> &'static str {
        self.option_name
    }

    pub fn doc_string(&self) -> &'static str {
        self.doc_string
    }

    pub fn variant_names(&self) -> &String {
        &self.variant_names
    }

    pub fn default(&self) -> &'static str {
        self.default
    }
}
// Generates the `Config` struct (with per-option access tracking), the
// `PartialConfig` mirror used for TOML parsing, and all associated methods.
macro_rules! create_config {
    ($($i:ident: $ty:ty, $def:expr, $( $dstring:expr ),+ );+ $(;)*) => (
        #[derive(Clone)]
        pub struct Config {
            // For each config item, we store a bool indicating whether it has
            // been accessed and the value.
            $($i: (Cell<bool>, $ty)),+
        }

        // Just like the Config struct but with each property wrapped
        // as Option<T>. This is used to parse a rustfmt.toml that doesn't
        // specify all properties of `Config`.
        // We first parse into `PartialConfig`, then create a default `Config`
        // and overwrite the properties with corresponding values from `PartialConfig`.
        #[derive(Deserialize, Serialize, Clone)]
        pub struct PartialConfig {
            $(pub $i: Option<$ty>),+
        }

        impl PartialConfig {
            pub fn to_toml(&self) -> Result<String, String> {
                // file_lines can't be specified in TOML
                let mut cloned = self.clone();
                cloned.file_lines = None;

                toml::to_string(&cloned)
                    .map_err(|e| format!("Could not output config: {}", e.to_string()))
            }
        }

        // Macro hygiene won't allow us to make `set_$i()` methods on Config
        // for each item, so this struct is used to give the API to set values:
        // `config.get().option(false)`. It's pretty ugly. Consider replacing
        // with `config.set_option(false)` if we ever get a stable/usable
        // `concat_idents!()`.
        pub struct ConfigSetter<'a>(&'a mut Config);

        impl<'a> ConfigSetter<'a> {
            $(
            pub fn $i(&mut self, value: $ty) {
                (self.0).$i.1 = value;
            }
            )+
        }

        impl Config {
            // Getter for each option; marks the option as accessed.
            $(
            pub fn $i(&self) -> $ty {
                self.$i.0.set(true);
                self.$i.1.clone()
            }
            )+

            pub fn set<'a>(&'a mut self) -> ConfigSetter<'a> {
                ConfigSetter(self)
            }

            fn fill_from_parsed_config(mut self, parsed: PartialConfig) -> Config {
                $(
                    if let Some(val) = parsed.$i {
                        self.$i.1 = val;
                    }
                )+
                self
            }

            pub fn from_toml(toml: &str) -> Result<Config, String> {
                let parsed: toml::Value =
                    toml.parse().map_err(|e| format!("Could not parse TOML: {}", e))?;
                let mut err: String = String::new();
                {
                    let table = parsed
                        .as_table()
                        .ok_or(String::from("Parsed config was not table"))?;
                    // Collect warnings for unrecognized keys without aborting.
                    for (key, _) in table {
                        match &**key {
                            $(
                                stringify!($i) => (),
                            )+
                            _ => {
                                let msg =
                                    &format!("Warning: Unknown configuration option `{}`\n",
                                             key);
                                err.push_str(msg)
                            }
                        }
                    }
                }
                match parsed.try_into() {
                    Ok(parsed_config) =>
                        Ok(Config::default().fill_from_parsed_config(parsed_config)),
                    Err(e) => {
                        err.push_str("Error: Decoding config file failed:\n");
                        err.push_str(format!("{}\n", e).as_str());
                        err.push_str("Please check your config file.\n");
                        Err(err)
                    }
                }
            }

            pub fn used_options(&self) -> PartialConfig {
                PartialConfig {
                    $(
                        $i: if self.$i.0.get() {
                                Some(self.$i.1.clone())
                            } else {
                                None
                            },
                    )+
                }
            }

            pub fn all_options(&self) -> PartialConfig {
                PartialConfig {
                    $(
                        $i: Some(self.$i.1.clone()),
                    )+
                }
            }

            pub fn override_value(&mut self, key: &str, val: &str)
            {
                match key {
                    $(
                        stringify!($i) => {
                            self.$i.1 = val.parse::<$ty>()
                                .expect(&format!("Failed to parse override for {} (\"{}\") as a {}",
                                                 stringify!($i),
                                                 val,
                                                 stringify!($ty)));
                        }
                    )+
                    _ => panic!("Unknown config key in override: {}", key)
                }
            }

            /// Construct a `Config` from the toml file specified at `file_path`.
            ///
            /// This method only looks at the provided path, for a method that
            /// searches parents for a `rustfmt.toml` see `from_resolved_toml_path`.
            ///
            /// Return a `Config` if the config could be read and parsed from
            /// the file, Error otherwise.
            pub fn from_toml_path(file_path: &Path) -> Result<Config, Error> {
                let mut file = File::open(&file_path)?;
                let mut toml = String::new();
                file.read_to_string(&mut toml)?;
                Config::from_toml(&toml).map_err(|err| Error::new(ErrorKind::InvalidData, err))
            }

            /// Resolve the config for input in `dir`.
            ///
            /// Searches for `rustfmt.toml` beginning with `dir`, and
            /// recursively checking parents of `dir` if no config file is found.
            /// If no config file exists in `dir` or in any parent, a
            /// default `Config` will be returned (and the returned path will be empty).
            ///
            /// Returns the `Config` to use, and the path of the project file if there was
            /// one.
            pub fn from_resolved_toml_path(dir: &Path) -> Result<(Config, Option<PathBuf>), Error> {
                /// Try to find a project file in the given directory and its parents.
                /// Returns the path of the nearest project file if one exists,
                /// or `None` if no project file was found.
                fn resolve_project_file(dir: &Path) -> Result<Option<PathBuf>, Error> {
                    let mut current = if dir.is_relative() {
                        env::current_dir()?.join(dir)
                    } else {
                        dir.to_path_buf()
                    };

                    current = fs::canonicalize(current)?;

                    loop {
                        // FIX: this previously read `get_toml_path(¤t)` — an
                        // HTML-entity mojibake of `&current`, which does not compile.
                        match get_toml_path(&current) {
                            Ok(Some(path)) => return Ok(Some(path)),
                            Err(e) => return Err(e),
                            _ => ()
                        }

                        // If the current directory has no parent, we're done searching.
                        if !current.pop() {
                            return Ok(None);
                        }
                    }
                }

                match resolve_project_file(dir)? {
                    None => Ok((Config::default(), None)),
                    Some(path) => Config::from_toml_path(&path).map(|config| (config, Some(path))),
                }
            }

            pub fn print_docs() {
                use std::cmp;
                let max = 0;
                $( let max = cmp::max(max, stringify!($i).len()+1); )+
                let mut space_str = String::with_capacity(max);
                for _ in 0..max {
                    space_str.push(' ');
                }
                println!("Configuration Options:");
                $(
                    let name_raw = stringify!($i);
                    let mut name_out = String::with_capacity(max);
                    for _ in name_raw.len()..max-1 {
                        name_out.push(' ')
                    }
                    name_out.push_str(name_raw);
                    name_out.push(' ');
                    println!("{}{} Default: {:?}",
                             name_out,
                             <$ty>::doc_hint(),
                             $def);
                    $(
                        println!("{}{}", space_str, $dstring);
                    )+
                    println!("");
                )+
            }
        }

        // Template for the default configuration
        impl Default for Config {
            fn default() -> Config {
                Config {
                    $(
                        $i: (Cell::new(false), $def),
                    )+
                }
            }
        }
    )
}
/// Check for the presence of known config file names (`rustfmt.toml`,
/// `.rustfmt.toml`) in `dir`.
///
/// Returns the path if a config file exists, `Ok(None)` if no file exists,
/// and `Err` for I/O errors other than "not found".
pub fn get_toml_path(dir: &Path) -> Result<Option<PathBuf>, Error> {
    const CONFIG_FILE_NAMES: [&'static str; 2] = [".rustfmt.toml", "rustfmt.toml"];

    for name in &CONFIG_FILE_NAMES {
        let candidate = dir.join(name);
        match fs::metadata(&candidate) {
            // Require an actual file, so a directory that happens to be named
            // e.g. `rustfmt.toml` is not mistaken for a configuration file.
            Ok(md) if md.is_file() => return Ok(Some(candidate)),
            Ok(_) => {}
            Err(e) => {
                // `NotFound` just means this candidate is absent — keep
                // looking; anything else is a genuine error for the caller.
                if e.kind() != ErrorKind::NotFound {
                    return Err(e);
                }
            }
        }
    }

    Ok(None)
}
// Declares every configuration option: `name: type, default, doc string(s)`.
// The doc strings are shown to users by `Config::print_docs()`.
create_config! {
    verbose: bool, false, "Use verbose output";
    disable_all_formatting: bool, false, "Don't reformat anything";
    skip_children: bool, false, "Don't reformat out of line modules";
    file_lines: FileLines, FileLines::all(),
        "Lines to format; this is not supported in rustfmt.toml, and can only be specified \
         via the --file-lines option";
    max_width: usize, 100, "Maximum width of each line";
    error_on_line_overflow: bool, true, "Error if unable to get all lines within max_width";
    tab_spaces: usize, 4, "Number of spaces per tab";
    fn_call_width: usize, 60,
        "Maximum width of the args of a function call before falling back to vertical formatting";
    struct_lit_width: usize, 18,
        "Maximum width in the body of a struct lit before falling back to vertical formatting";
    struct_variant_width: usize, 35,
        "Maximum width in the body of a struct variant before falling back to vertical formatting";
    force_explicit_abi: bool, true, "Always print the abi for extern items";
    newline_style: NewlineStyle, NewlineStyle::Unix, "Unix or Windows line endings";
    fn_brace_style: BraceStyle, BraceStyle::SameLineWhere, "Brace style for functions";
    item_brace_style: BraceStyle, BraceStyle::SameLineWhere, "Brace style for structs and enums";
    control_style: Style, Style::Rfc, "Indent style for control flow statements";
    control_brace_style: ControlBraceStyle, ControlBraceStyle::AlwaysSameLine,
        "Brace style for control flow constructs";
    impl_empty_single_line: bool, true, "Put empty-body implementations on a single line";
    trailing_comma: SeparatorTactic, SeparatorTactic::Vertical,
        "How to handle trailing commas for lists";
    fn_empty_single_line: bool, true, "Put empty-body functions on a single line";
    fn_single_line: bool, false, "Put single-expression functions on a single line";
    fn_return_indent: ReturnIndent, ReturnIndent::WithArgs,
        "Location of return type in function declaration";
    fn_args_paren_newline: bool, false, "If function argument parenthesis goes on a newline";
    fn_args_density: Density, Density::Tall, "Argument density in functions";
    fn_args_layout: IndentStyle, IndentStyle::Block,
        "Layout of function arguments and tuple structs";
    array_layout: IndentStyle, IndentStyle::Block, "Indent on arrays";
    array_width: usize, 60,
        "Maximum width of an array literal before falling back to vertical formatting";
    type_punctuation_density: TypeDensity, TypeDensity::Wide,
        "Determines if '+' or '=' are wrapped in spaces in the punctuation of types";
    where_style: Style, Style::Rfc, "Overall strategy for where clauses";
    // TODO:
    // 1. Should we at least try to put the where clause on the same line as the rest of the
    // function decl?
    // 2. Currently options `Tall` and `Vertical` produce the same output.
    where_density: Density, Density::CompressedIfEmpty, "Density of a where clause";
    where_layout: ListTactic, ListTactic::Vertical, "Element layout inside a where clause";
    where_pred_indent: IndentStyle, IndentStyle::Visual,
        "Indentation style of a where predicate";
    generics_indent: IndentStyle, IndentStyle::Block, "Indentation of generics";
    struct_lit_style: IndentStyle, IndentStyle::Block, "Style of struct definition";
    struct_lit_multiline_style: MultilineStyle, MultilineStyle::PreferSingle,
        "Multiline style on literal structs";
    fn_call_style: IndentStyle, IndentStyle::Block, "Indentation for function calls, etc.";
    report_todo: ReportTactic, ReportTactic::Never,
        "Report all, none or unnumbered occurrences of TODO in source file comments";
    report_fixme: ReportTactic, ReportTactic::Never,
        "Report all, none or unnumbered occurrences of FIXME in source file comments";
    chain_indent: IndentStyle, IndentStyle::Block, "Indentation of chain";
    chain_one_line_max: usize, 60, "Maximum length of a chain to fit on a single line";
    chain_split_single_child: bool, false, "Split a chain with a single child if its length \
                                            exceeds `chain_one_line_max`";
    reorder_imports: bool, false, "Reorder import statements alphabetically";
    reorder_imports_in_group: bool, false, "Reorder import statements in group";
    reorder_imported_names: bool, false,
        "Reorder lists of names in import statements alphabetically";
    single_line_if_else_max_width: usize, 50, "Maximum line length for single line if-else \
                                               expressions. A value of zero means always break \
                                               if-else expressions.";
    format_strings: bool, false, "Format string literals where necessary";
    force_format_strings: bool, false, "Always format string literals";
    take_source_hints: bool, false, "Retain some formatting characteristics from the source code";
    hard_tabs: bool, false, "Use tab characters for indentation, spaces for alignment";
    wrap_comments: bool, false, "Break comments to fit on the line";
    comment_width: usize, 80, "Maximum length of comments. No effect unless wrap_comments = true";
    normalize_comments: bool, false, "Convert /* */ comments to // comments where possible";
    wrap_match_arms: bool, true, "Wrap multiline match arms in blocks";
    match_block_trailing_comma: bool, false,
        "Put a trailing comma after a block based match arm (non-block arms are not affected)";
    indent_match_arms: bool, true, "Indent match arms instead of keeping them at the same \
                                    indentation level as the match keyword";
    closure_block_indent_threshold: isize, 7, "How many lines a closure must have before it is \
                                               block indented. -1 means never use block indent.";
    space_before_type_annotation: bool, false,
        "Leave a space before the colon in a type annotation";
    space_after_type_annotation_colon: bool, true,
        "Leave a space after the colon in a type annotation";
    space_before_struct_lit_field_colon: bool, false,
        "Leave a space before the colon in a struct literal field";
    space_after_struct_lit_field_colon: bool, true,
        "Leave a space after the colon in a struct literal field";
    space_before_bound: bool, false, "Leave a space before the colon in a trait or lifetime bound";
    space_after_bound_colon: bool, true,
        "Leave a space after the colon in a trait or lifetime bound";
    spaces_around_ranges: bool, false, "Put spaces around the .. and ... range operators";
    spaces_within_angle_brackets: bool, false, "Put spaces within non-empty generic arguments";
    spaces_within_square_brackets: bool, false, "Put spaces within non-empty square brackets";
    spaces_within_parens: bool, false, "Put spaces within non-empty parentheses";
    use_try_shorthand: bool, false, "Replace uses of the try! macro by the ? shorthand";
    write_mode: WriteMode, WriteMode::Replace,
        "What Write Mode to use when none is supplied: Replace, Overwrite, Display, Diff, Coverage";
    condense_wildcard_suffixes: bool, false, "Replace strings of _ wildcards by a single .. in \
                                              tuple patterns";
    // Fixed user-facing typo: "funciton" -> "function".
    combine_control_expr: bool, true, "Combine control expressions with function calls."
}
#[cfg(test)]
mod test {
    use super::Config;

    #[test]
    fn test_config_set() {
        // Options written through `ConfigSetter` must be visible through the getter.
        let mut config = Config::default();
        config.set().verbose(false);
        assert_eq!(config.verbose(), false);
        config.set().verbose(true);
        assert_eq!(config.verbose(), true);
    }

    #[test]
    fn test_config_used_to_toml() {
        let config = Config::default();

        // Accessing an option marks it as "used", so exactly these two options
        // should appear in the serialized `used_options()` output.
        let verbose = config.verbose();
        let skip_children = config.skip_children();

        let used_options = config.used_options();
        let toml = used_options.to_toml().unwrap();
        assert_eq!(
            toml,
            format!("verbose = {}\nskip_children = {}\n", verbose, skip_children)
        );
    }
}
// Config knows if an option was default or set
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate toml;
use std::cell::Cell;
use std::fs;
use std::fs::File;
use std::env;
use std::io::{Error, ErrorKind, Read};
use std::path::{Path, PathBuf};
use file_lines::FileLines;
use lists::{SeparatorTactic, ListTactic};
// Generates a simple C-like enum for a configuration option, plus
// (de)serialization support via `impl_enum_serialize_and_deserialize!`.
// A trailing comma in the variant list is accepted.
macro_rules! configuration_option_enum{
    ($e:ident: $( $x:ident ),+ $(,)*) => {
        #[derive(Copy, Clone, Eq, PartialEq, Debug)]
        pub enum $e {
            $( $x ),+
        }

        impl_enum_serialize_and_deserialize!($e, $( $x ),+);
    }
}
// Overall formatting style preference.
configuration_option_enum! { Style:
    Rfc, // Follow the style RFCs style.
    Legacy, // Follow the traditional Rustfmt style.
}

// Line-ending convention for emitted output.
configuration_option_enum! { NewlineStyle:
    Windows, // \r\n
    Unix, // \n
    Native, // \r\n in Windows, \n on other platforms
}

configuration_option_enum! { BraceStyle:
    AlwaysNextLine,
    PreferSameLine,
    // Prefer same line except where there is a where clause, in which case force
    // the brace to the next line.
    SameLineWhere,
}

configuration_option_enum! { ControlBraceStyle:
    // K&R style, Rust community default
    AlwaysSameLine,
    // Stroustrup style
    ClosingNextLine,
    // Allman style
    AlwaysNextLine,
}

// How to indent a function's return type.
configuration_option_enum! { ReturnIndent:
    // Aligned with the arguments
    WithArgs,
    // Aligned with the where clause
    WithWhereClause,
}

configuration_option_enum! { IndentStyle:
    // First line on the same line as the opening brace, all lines aligned with
    // the first line.
    Visual,
    // First line is on a new line and all lines align with block indent.
    Block,
}

// How densely to pack list-like constructs.
configuration_option_enum! { Density:
    // Fit as much on one line as possible.
    Compressed,
    // Use more lines.
    Tall,
    // Try to compress if the body is empty.
    CompressedIfEmpty,
    // Place every item on a separate line.
    Vertical,
}

// Spacing inside type punctuation.
configuration_option_enum! { TypeDensity:
    // No spaces around "=" and "+"
    Compressed,
    // Spaces around " = " and " + "
    Wide,
}
impl Density {
    /// Translates this density preference into the list tactic used when
    /// laying out list items.
    pub fn to_list_tactic(self) -> ListTactic {
        match self {
            Density::Compressed => ListTactic::Mixed,
            Density::Tall => ListTactic::HorizontalVertical,
            Density::CompressedIfEmpty => ListTactic::HorizontalVertical,
            Density::Vertical => ListTactic::Vertical,
        }
    }
}
// Whether and how to insert license text at the top of files.
configuration_option_enum! { LicensePolicy:
    // Do not place license text at top of files
    NoLicense,
    // Use the text in "license" field as the license
    TextLicense,
    // Use a text file as the license text
    FileLicense,
}

// Layout preference for multiline constructs such as struct literals.
configuration_option_enum! { MultilineStyle:
    // Use horizontal layout if it fits in one line, fall back to vertical
    PreferSingle,
    // Use vertical layout
    ForceMulti,
}
impl MultilineStyle {
    /// Translates this multiline preference into the corresponding list tactic.
    pub fn to_list_tactic(self) -> ListTactic {
        match self {
            MultilineStyle::ForceMulti => ListTactic::Vertical,
            MultilineStyle::PreferSingle => ListTactic::HorizontalVertical,
        }
    }
}
// When to report TODO/FIXME comments found in the source.
configuration_option_enum! { ReportTactic:
    Always,
    Unnumbered,
    Never,
}

// How rustfmt delivers its output.
configuration_option_enum! { WriteMode:
    // Backs the original file up and overwrites the original.
    Replace,
    // Overwrites original file without backup.
    Overwrite,
    // Writes the output to stdout.
    Display,
    // Writes the diff to stdout.
    Diff,
    // Displays how much of the input file was processed
    Coverage,
    // Unfancy stdout
    Plain,
    // Outputs a checkstyle XML file.
    Checkstyle,
}
/// Trait for types that can be used in `Config`.
pub trait ConfigType: Sized {
    /// Returns hint text for use in `Config::print_docs()`. For enum types, this is a
    /// pipe-separated list of variants; for other types it returns "<type>".
    fn doc_hint() -> String;
}

impl ConfigType for bool {
    fn doc_hint() -> String {
        "<boolean>".to_owned()
    }
}

impl ConfigType for usize {
    fn doc_hint() -> String {
        "<unsigned integer>".to_owned()
    }
}

impl ConfigType for isize {
    fn doc_hint() -> String {
        "<signed integer>".to_owned()
    }
}

impl ConfigType for String {
    fn doc_hint() -> String {
        "<string>".to_owned()
    }
}
impl ConfigType for FileLines {
    fn doc_hint() -> String {
        // `file_lines` can only be supplied on the command line, where its
        // value is given as JSON — hence this hint rather than a type name.
        String::from("<json>")
    }
}
/// Metadata describing a single configuration option: its name, documentation,
/// accepted variants, and default value.
// NOTE(review): no construction site is visible in this chunk — confirm this
// type is still used before extending it.
pub struct ConfigHelpItem {
    // Option name as written in rustfmt.toml.
    option_name: &'static str,
    // Human-readable description of the option.
    doc_string: &'static str,
    // Rendered list of accepted values (owned because it is built at runtime).
    variant_names: String,
    // Textual form of the default value.
    default: &'static str,
}

impl ConfigHelpItem {
    /// The option's name as written in rustfmt.toml.
    pub fn option_name(&self) -> &'static str {
        self.option_name
    }

    /// The option's documentation string.
    pub fn doc_string(&self) -> &'static str {
        self.doc_string
    }

    /// The rendered list of accepted values.
    pub fn variant_names(&self) -> &String {
        &self.variant_names
    }

    /// The textual form of the default value.
    pub fn default(&self) -> &'static str {
        self.default
    }
}
// Generates the `Config` type and all of its supporting machinery from the
// option list given at the invocation site. Each option expands to a field,
// a getter, a setter, a "was it set?" query, and an entry in the docs.
macro_rules! create_config {
    ($($i:ident: $ty:ty, $def:expr, $( $dstring:expr ),+ );+ $(;)*) => (
        #[derive(Clone)]
        pub struct Config {
            // For each config item, we store a bool indicating whether it has
            // been accessed and the value, and a bool whether the option was
            // manually initialised, or taken from the default,
            $($i: (Cell<bool>, bool, $ty)),+
        }

        // Just like the Config struct but with each property wrapped
        // as Option<T>. This is used to parse a rustfmt.toml that doesn't
        // specify all properties of `Config`.
        // We first parse into `PartialConfig`, then create a default `Config`
        // and overwrite the properties with corresponding values from `PartialConfig`.
        #[derive(Deserialize, Serialize, Clone)]
        pub struct PartialConfig {
            $(pub $i: Option<$ty>),+
        }

        impl PartialConfig {
            pub fn to_toml(&self) -> Result<String, String> {
                // file_lines can't be specified in TOML
                let mut cloned = self.clone();
                cloned.file_lines = None;

                toml::to_string(&cloned)
                    .map_err(|e| format!("Could not output config: {}", e.to_string()))
            }
        }

        // Macro hygiene won't allow us to make `set_$i()` methods on Config
        // for each item, so this struct is used to give the API to set values:
        // `config.get().option(false)`. It's pretty ugly. Consider replacing
        // with `config.set_option(false)` if we ever get a stable/usable
        // `concat_idents!()`.
        pub struct ConfigSetter<'a>(&'a mut Config);

        impl<'a> ConfigSetter<'a> {
            $(
            pub fn $i(&mut self, value: $ty) {
                (self.0).$i.2 = value;
            }
            )+
        }

        // Query each option, returns true if the user set the option, false if
        // a default was used.
        pub struct ConfigWasSet<'a>(&'a Config);

        impl<'a> ConfigWasSet<'a> {
            $(
            pub fn $i(&self) -> bool {
                (self.0).$i.1
            }
            )+
        }

        impl Config {
            // Getter: marks the option as accessed (for `used_options`) and
            // returns a clone of its value.
            $(
            pub fn $i(&self) -> $ty {
                self.$i.0.set(true);
                self.$i.2.clone()
            }
            )+

            pub fn set<'a>(&'a mut self) -> ConfigSetter<'a> {
                ConfigSetter(self)
            }

            pub fn was_set<'a>(&'a self) -> ConfigWasSet<'a> {
                ConfigWasSet(self)
            }

            // Overlay any values present in `parsed` onto `self`, marking them
            // as manually initialised.
            fn fill_from_parsed_config(mut self, parsed: PartialConfig) -> Config {
            $(
                if let Some(val) = parsed.$i {
                    self.$i.1 = true;
                    self.$i.2 = val;
                }
            )+
                self
            }

            pub fn from_toml(toml: &str) -> Result<Config, String> {
                let parsed: toml::Value =
                    toml.parse().map_err(|e| format!("Could not parse TOML: {}", e))?;
                let mut err: String = String::new();
                {
                    // Warn about keys that are not known options; unknown keys
                    // are reported but do not fail the parse on their own.
                    let table = parsed
                        .as_table()
                        .ok_or(String::from("Parsed config was not table"))?;
                    for (key, _) in table {
                        match &**key {
                            $(
                                stringify!($i) => (),
                            )+
                            _ => {
                                let msg =
                                    &format!("Warning: Unknown configuration option `{}`\n",
                                             key);
                                err.push_str(msg)
                            }
                        }
                    }
                }
                match parsed.try_into() {
                    Ok(parsed_config) =>
                        Ok(Config::default().fill_from_parsed_config(parsed_config)),
                    Err(e) => {
                        err.push_str("Error: Decoding config file failed:\n");
                        err.push_str(format!("{}\n", e).as_str());
                        err.push_str("Please check your config file.\n");
                        Err(err)
                    }
                }
            }

            // Snapshot of only the options that have been read via a getter.
            pub fn used_options(&self) -> PartialConfig {
                PartialConfig {
                    $(
                        $i: if self.$i.0.get() {
                                Some(self.$i.2.clone())
                            } else {
                                None
                            },
                    )+
                }
            }

            pub fn all_options(&self) -> PartialConfig {
                PartialConfig {
                    $(
                        $i: Some(self.$i.2.clone()),
                    )+
                }
            }

            // Panics if the key is unknown or the value fails to parse; used
            // for command-line overrides.
            pub fn override_value(&mut self, key: &str, val: &str)
            {
                match key {
                    $(
                        stringify!($i) => {
                            self.$i.2 = val.parse::<$ty>()
                                .expect(&format!("Failed to parse override for {} (\"{}\") as a {}",
                                                 stringify!($i),
                                                 val,
                                                 stringify!($ty)));
                        }
                    )+
                    _ => panic!("Unknown config key in override: {}", key)
                }
            }

            /// Construct a `Config` from the toml file specified at `file_path`.
            ///
            /// This method only looks at the provided path, for a method that
            /// searches parents for a `rustfmt.toml` see `from_resolved_toml_path`.
            ///
            /// Return a `Config` if the config could be read and parsed from
            /// the file, Error otherwise.
            pub fn from_toml_path(file_path: &Path) -> Result<Config, Error> {
                let mut file = File::open(&file_path)?;
                let mut toml = String::new();
                file.read_to_string(&mut toml)?;
                Config::from_toml(&toml).map_err(|err| Error::new(ErrorKind::InvalidData, err))
            }

            /// Resolve the config for input in `dir`.
            ///
            /// Searches for `rustfmt.toml` beginning with `dir`, and
            /// recursively checking parents of `dir` if no config file is found.
            /// If no config file exists in `dir` or in any parent, a
            /// default `Config` will be returned (and the returned path will be empty).
            ///
            /// Returns the `Config` to use, and the path of the project file if there was
            /// one.
            pub fn from_resolved_toml_path(dir: &Path) -> Result<(Config, Option<PathBuf>), Error> {
                /// Try to find a project file in the given directory and its parents.
                /// Returns the path of the nearest project file if one exists,
                /// or `None` if no project file was found.
                fn resolve_project_file(dir: &Path) -> Result<Option<PathBuf>, Error> {
                    let mut current = if dir.is_relative() {
                        env::current_dir()?.join(dir)
                    } else {
                        dir.to_path_buf()
                    };

                    current = fs::canonicalize(current)?;

                    loop {
                        // Fixed mojibake: the argument was garbled to `¤t`
                        // (an HTML-entity mangling of `&current`).
                        match get_toml_path(&current) {
                            Ok(Some(path)) => return Ok(Some(path)),
                            Err(e) => return Err(e),
                            _ => ()
                        }

                        // If the current directory has no parent, we're done searching.
                        if !current.pop() {
                            return Ok(None);
                        }
                    }
                }

                match resolve_project_file(dir)? {
                    None => Ok((Config::default(), None)),
                    Some(path) => Config::from_toml_path(&path).map(|config| (config, Some(path))),
                }
            }

            pub fn print_docs() {
                use std::cmp;
                // Compute the widest option name so the output lines up.
                let max = 0;
                $( let max = cmp::max(max, stringify!($i).len()+1); )+
                let mut space_str = String::with_capacity(max);
                for _ in 0..max {
                    space_str.push(' ');
                }
                println!("Configuration Options:");
                $(
                    let name_raw = stringify!($i);
                    let mut name_out = String::with_capacity(max);
                    for _ in name_raw.len()..max-1 {
                        name_out.push(' ')
                    }
                    name_out.push_str(name_raw);
                    name_out.push(' ');
                    println!("{}{} Default: {:?}",
                             name_out,
                             <$ty>::doc_hint(),
                             $def);
                    $(
                        println!("{}{}", space_str, $dstring);
                    )+
                    println!("");
                )+
            }
        }

        // Template for the default configuration
        impl Default for Config {
            fn default() -> Config {
                Config {
                    $(
                        $i: (Cell::new(false), false, $def),
                    )+
                }
            }
        }
    )
}
/// Check for the presence of known config file names (`rustfmt.toml, `.rustfmt.toml`) in `dir`
///
/// Return the path if a config file exists, empty if no file exists, and Error for IO errors
pub fn get_toml_path(dir: &Path) -> Result<Option<PathBuf>, Error> {
    // `.rustfmt.toml` is checked first and therefore wins if both files exist.
    const CONFIG_FILE_NAMES: [&'static str; 2] = [".rustfmt.toml", "rustfmt.toml"];
    for config_file_name in &CONFIG_FILE_NAMES {
        let config_file = dir.join(config_file_name);
        match fs::metadata(&config_file) {
            // Only return if it's a file to handle the unlikely situation of a directory named
            // `rustfmt.toml`.
            Ok(ref md) if md.is_file() => return Ok(Some(config_file)),
            // Return the error if it's something other than `NotFound`; otherwise we didn't
            // find the project file yet, and continue searching.
            Err(e) => {
                if e.kind() != ErrorKind::NotFound {
                    return Err(e);
                }
            }
            // A non-file entry (e.g. directory) with a config name: keep looking.
            _ => {}
        }
    }
    Ok(None)
}
// Declares every configuration option: `name: type, default, doc string(s)`.
// The doc strings are shown to users by `Config::print_docs()`.
create_config! {
    verbose: bool, false, "Use verbose output";
    disable_all_formatting: bool, false, "Don't reformat anything";
    skip_children: bool, false, "Don't reformat out of line modules";
    file_lines: FileLines, FileLines::all(),
        "Lines to format; this is not supported in rustfmt.toml, and can only be specified \
         via the --file-lines option";
    max_width: usize, 100, "Maximum width of each line";
    error_on_line_overflow: bool, true, "Error if unable to get all lines within max_width";
    tab_spaces: usize, 4, "Number of spaces per tab";
    fn_call_width: usize, 60,
        "Maximum width of the args of a function call before falling back to vertical formatting";
    struct_lit_width: usize, 18,
        "Maximum width in the body of a struct lit before falling back to vertical formatting";
    struct_variant_width: usize, 35,
        "Maximum width in the body of a struct variant before falling back to vertical formatting";
    force_explicit_abi: bool, true, "Always print the abi for extern items";
    newline_style: NewlineStyle, NewlineStyle::Unix, "Unix or Windows line endings";
    fn_brace_style: BraceStyle, BraceStyle::SameLineWhere, "Brace style for functions";
    item_brace_style: BraceStyle, BraceStyle::SameLineWhere, "Brace style for structs and enums";
    control_style: Style, Style::Rfc, "Indent style for control flow statements";
    control_brace_style: ControlBraceStyle, ControlBraceStyle::AlwaysSameLine,
        "Brace style for control flow constructs";
    impl_empty_single_line: bool, true, "Put empty-body implementations on a single line";
    trailing_comma: SeparatorTactic, SeparatorTactic::Vertical,
        "How to handle trailing commas for lists";
    fn_empty_single_line: bool, true, "Put empty-body functions on a single line";
    fn_single_line: bool, false, "Put single-expression functions on a single line";
    fn_return_indent: ReturnIndent, ReturnIndent::WithArgs,
        "Location of return type in function declaration";
    fn_args_paren_newline: bool, false, "If function argument parenthesis goes on a newline";
    fn_args_density: Density, Density::Tall, "Argument density in functions";
    fn_args_layout: IndentStyle, IndentStyle::Block,
        "Layout of function arguments and tuple structs";
    array_layout: IndentStyle, IndentStyle::Block, "Indent on arrays";
    array_width: usize, 60,
        "Maximum width of an array literal before falling back to vertical formatting";
    type_punctuation_density: TypeDensity, TypeDensity::Wide,
        "Determines if '+' or '=' are wrapped in spaces in the punctuation of types";
    where_style: Style, Style::Rfc, "Overall strategy for where clauses";
    // TODO:
    // 1. Should we at least try to put the where clause on the same line as the rest of the
    // function decl?
    // 2. Currently options `Tall` and `Vertical` produce the same output.
    where_density: Density, Density::CompressedIfEmpty, "Density of a where clause";
    where_layout: ListTactic, ListTactic::Vertical, "Element layout inside a where clause";
    where_pred_indent: IndentStyle, IndentStyle::Visual,
        "Indentation style of a where predicate";
    generics_indent: IndentStyle, IndentStyle::Block, "Indentation of generics";
    struct_lit_style: IndentStyle, IndentStyle::Block, "Style of struct definition";
    struct_lit_multiline_style: MultilineStyle, MultilineStyle::PreferSingle,
        "Multiline style on literal structs";
    fn_call_style: IndentStyle, IndentStyle::Block, "Indentation for function calls, etc.";
    report_todo: ReportTactic, ReportTactic::Never,
        "Report all, none or unnumbered occurrences of TODO in source file comments";
    report_fixme: ReportTactic, ReportTactic::Never,
        "Report all, none or unnumbered occurrences of FIXME in source file comments";
    chain_indent: IndentStyle, IndentStyle::Block, "Indentation of chain";
    chain_one_line_max: usize, 60, "Maximum length of a chain to fit on a single line";
    chain_split_single_child: bool, false, "Split a chain with a single child if its length \
                                            exceeds `chain_one_line_max`";
    reorder_imports: bool, false, "Reorder import statements alphabetically";
    reorder_imports_in_group: bool, false, "Reorder import statements in group";
    reorder_imported_names: bool, false,
        "Reorder lists of names in import statements alphabetically";
    single_line_if_else_max_width: usize, 50, "Maximum line length for single line if-else \
                                               expressions. A value of zero means always break \
                                               if-else expressions.";
    format_strings: bool, false, "Format string literals where necessary";
    force_format_strings: bool, false, "Always format string literals";
    take_source_hints: bool, false, "Retain some formatting characteristics from the source code";
    hard_tabs: bool, false, "Use tab characters for indentation, spaces for alignment";
    wrap_comments: bool, false, "Break comments to fit on the line";
    comment_width: usize, 80, "Maximum length of comments. No effect unless wrap_comments = true";
    normalize_comments: bool, false, "Convert /* */ comments to // comments where possible";
    wrap_match_arms: bool, true, "Wrap multiline match arms in blocks";
    match_block_trailing_comma: bool, false,
        "Put a trailing comma after a block based match arm (non-block arms are not affected)";
    indent_match_arms: bool, true, "Indent match arms instead of keeping them at the same \
                                    indentation level as the match keyword";
    closure_block_indent_threshold: isize, 7, "How many lines a closure must have before it is \
                                               block indented. -1 means never use block indent.";
    space_before_type_annotation: bool, false,
        "Leave a space before the colon in a type annotation";
    space_after_type_annotation_colon: bool, true,
        "Leave a space after the colon in a type annotation";
    space_before_struct_lit_field_colon: bool, false,
        "Leave a space before the colon in a struct literal field";
    space_after_struct_lit_field_colon: bool, true,
        "Leave a space after the colon in a struct literal field";
    space_before_bound: bool, false, "Leave a space before the colon in a trait or lifetime bound";
    space_after_bound_colon: bool, true,
        "Leave a space after the colon in a trait or lifetime bound";
    spaces_around_ranges: bool, false, "Put spaces around the .. and ... range operators";
    spaces_within_angle_brackets: bool, false, "Put spaces within non-empty generic arguments";
    spaces_within_square_brackets: bool, false, "Put spaces within non-empty square brackets";
    spaces_within_parens: bool, false, "Put spaces within non-empty parentheses";
    use_try_shorthand: bool, false, "Replace uses of the try! macro by the ? shorthand";
    write_mode: WriteMode, WriteMode::Replace,
        "What Write Mode to use when none is supplied: Replace, Overwrite, Display, Diff, Coverage";
    condense_wildcard_suffixes: bool, false, "Replace strings of _ wildcards by a single .. in \
                                              tuple patterns";
    // Fixed user-facing typo: "funciton" -> "function".
    combine_control_expr: bool, true, "Combine control expressions with function calls."
}
#[cfg(test)]
mod test {
    use super::Config;

    #[test]
    fn test_config_set() {
        // Flip an option through the setter and read it back through the getter.
        let mut cfg = Config::default();
        cfg.set().verbose(false);
        assert_eq!(cfg.verbose(), false);
        cfg.set().verbose(true);
        assert_eq!(cfg.verbose(), true);
    }

    #[test]
    fn test_config_used_to_toml() {
        let cfg = Config::default();

        // Reading an option marks it as used, so only these two appear below.
        let verbose = cfg.verbose();
        let skip_children = cfg.skip_children();

        let toml = cfg.used_options().to_toml().unwrap();
        let expected = format!("verbose = {}\nskip_children = {}\n", verbose, skip_children);
        assert_eq!(toml, expected);
    }

    #[test]
    fn test_was_set() {
        // Only options present in the parsed TOML count as "set".
        let cfg = Config::from_toml("hard_tabs = true").unwrap();
        assert_eq!(cfg.was_set().hard_tabs(), true);
        assert_eq!(cfg.was_set().verbose(), false);
    }
}
|
//! Mdbook's configuration system.
//!
//! The main entrypoint of the `config` module is the `Config` struct. This acts
//! essentially as a bag of configuration information, with a couple
//! pre-determined tables (`BookConfig` and `BuildConfig`) as well as support
//! for arbitrary data which is exposed to plugins and alternative backends.
//!
//!
//! # Examples
//!
//! ```rust
//! # use mdbook::errors::*;
//! use std::path::PathBuf;
//! use std::str::FromStr;
//! use mdbook::Config;
//! use toml::Value;
//!
//! # fn run() -> Result<()> {
//! let src = r#"
//! [book]
//! title = "My Book"
//! authors = ["Michael-F-Bryan"]
//!
//! [build]
//! src = "out"
//!
//! [other-table.foo]
//! bar = 123
//! "#;
//!
//! // load the `Config` from a toml string
//! let mut cfg = Config::from_str(src)?;
//!
//! // retrieve a nested value
//! let bar = cfg.get("other-table.foo.bar").cloned();
//! assert_eq!(bar, Some(Value::Integer(123)));
//!
//! // Set the `output.html.theme` directory
//! assert!(cfg.get("output.html").is_none());
//! cfg.set("output.html.theme", "./themes");
//!
//! // then load it again, automatically deserializing to a `PathBuf`.
//! let got: Option<PathBuf> = cfg.get_deserialized_opt("output.html.theme")?;
//! assert_eq!(got, Some(PathBuf::from("./themes")));
//! # Ok(())
//! # }
//! # fn main() { run().unwrap() }
//! ```
#![deny(missing_docs)]
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::env;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use toml::value::Table;
use toml::{self, Value};
use toml_query::delete::TomlValueDeleteExt;
use toml_query::insert::TomlValueInsertExt;
use toml_query::read::TomlValueReadExt;
use crate::errors::*;
use crate::utils;
/// The overall configuration object for MDBook, essentially an in-memory
/// representation of `book.toml`.
#[derive(Debug, Clone, PartialEq)]
pub struct Config {
    /// Metadata about the book.
    pub book: BookConfig,
    /// Information about the build environment.
    pub build: BuildConfig,
    // Catch-all for every table not covered by the typed fields above
    // (e.g. `output.*`, `preprocessor.*`); accessed through `get`/`set`.
    rest: Value,
}
impl FromStr for Config {
    type Err = Error;

    /// Load a `Config` from some string.
    fn from_str(src: &str) -> Result<Self> {
        // Parsing is delegated entirely to serde/toml; any failure is wrapped
        // in a single user-facing message.
        toml::from_str(src).chain_err(|| Error::from("Invalid configuration file"))
    }
}
impl Config {
/// Load the configuration file from disk.
pub fn from_disk<P: AsRef<Path>>(config_file: P) -> Result<Config> {
let mut buffer = String::new();
File::open(config_file)
.chain_err(|| "Unable to open the configuration file")?
.read_to_string(&mut buffer)
.chain_err(|| "Couldn't read the file")?;
Config::from_str(&buffer)
}
    /// Updates the `Config` from the available environment variables.
    ///
    /// Variables starting with `MDBOOK_` are used for configuration. The key is
    /// created by removing the `MDBOOK_` prefix and turning the resulting
    /// string into `kebab-case`. Double underscores (`__`) separate nested
    /// keys, while a single underscore (`_`) is replaced with a dash (`-`).
    ///
    /// For example:
    ///
    /// - `MDBOOK_foo` -> `foo`
    /// - `MDBOOK_FOO` -> `foo`
    /// - `MDBOOK_FOO__BAR` -> `foo.bar`
    /// - `MDBOOK_FOO_BAR` -> `foo-bar`
    /// - `MDBOOK_FOO_bar__baz` -> `foo-bar.baz`
    ///
    /// So by setting the `MDBOOK_BOOK__TITLE` environment variable you can
    /// override the book's title without needing to touch your `book.toml`.
    ///
    /// > **Note:** To facilitate setting more complex config items, the value
    /// > of an environment variable is first parsed as JSON, falling back to a
    /// > string if the parse fails.
    /// >
    /// > This means, if you so desired, you could override all book metadata
    /// > when building the book with something like
    /// >
    /// > ```text
    /// > $ export MDBOOK_BOOK="{'title': 'My Awesome Book', authors: ['Michael-F-Bryan']}"
    /// > $ mdbook build
    /// > ```
    ///
    /// The latter case may be useful in situations where `mdbook` is invoked
    /// from a script or CI, where it sometimes isn't possible to update the
    /// `book.toml` before building.
    pub fn update_from_env(&mut self) {
        debug!("Updating the config from environment variables");

        // `parse_env` maps an `MDBOOK_*` variable name onto a dotted config
        // key; variables it rejects are skipped entirely.
        let overrides =
            env::vars().filter_map(|(key, value)| parse_env(&key).map(|index| (index, value)));

        for (key, value) in overrides {
            trace!("{} => {}", key, value);
            // Try JSON first so structured values survive; otherwise treat the
            // raw variable content as a plain string.
            let parsed_value = serde_json::from_str(&value)
                .unwrap_or_else(|_| serde_json::Value::String(value.to_string()));

            self.set(key, parsed_value).expect("unreachable");
        }
    }
/// Fetch an arbitrary item from the `Config` as a `toml::Value`.
///
/// You can use dotted indices to access nested items (e.g.
/// `output.html.playpen` will fetch the "playpen" out of the html output
/// table).
pub fn get(&self, key: &str) -> Option<&Value> {
self.rest.read(key).unwrap_or(None)
}
/// Fetch a value from the `Config` so you can mutate it.
pub fn get_mut(&mut self, key: &str) -> Option<&mut Value> {
match self.rest.read_mut(key) {
Ok(inner) => inner,
Err(_) => None,
}
}
    /// Convenience method for getting the html renderer's configuration.
    ///
    /// # Note
    ///
    /// This is for compatibility only. It will be removed completely once the
    /// HTML renderer is refactored to be less coupled to `mdbook` internals.
    #[doc(hidden)]
    pub fn html_config(&self) -> Option<HtmlConfig> {
        match self.get_deserialized_opt("output.html") {
            Ok(Some(config)) => Some(config),
            Ok(None) => None,
            Err(e) => {
                // A malformed `[output.html]` table is logged and treated as
                // absent rather than propagated to the caller.
                utils::log_backtrace(&e.chain_err(|| "Parsing configuration [output.html]"));
                None
            }
        }
    }
    /// Deprecated, use get_deserialized_opt instead.
    #[deprecated = "use get_deserialized_opt instead"]
    pub fn get_deserialized<'de, T: Deserialize<'de>, S: AsRef<str>>(&self, name: S) -> Result<T> {
        let name = name.as_ref();

        match self.get_deserialized_opt(name)? {
            Some(value) => Ok(value),
            // Unlike the `_opt` variant, a missing key is an error here.
            None => bail!("Key not found, {:?}", name),
        }
    }
    /// Convenience function to fetch a value from the config and deserialize it
    /// into some arbitrary type.
    pub fn get_deserialized_opt<'de, T: Deserialize<'de>, S: AsRef<str>>(
        &self,
        name: S,
    ) -> Result<Option<T>> {
        let name = name.as_ref();
        self.get(name)
            .map(|value| {
                // Clone the subtree so `try_into` can consume it by value.
                value
                    .clone()
                    .try_into()
                    .chain_err(|| "Couldn't deserialize the value")
            })
            .transpose()
    }
/// Set a config key, clobbering any existing values along the way.
///
/// The only way this can fail is if we can't serialize `value` into a
/// `toml::Value`.
pub fn set<S: Serialize, I: AsRef<str>>(&mut self, index: I, value: S) -> Result<()> {
    let index = index.as_ref();
    // Round-trip `value` through `toml::Value` so any serializable type can
    // be stored in the config tree. (The message previously said "JSON
    // Value", which was wrong — this is a TOML conversion.)
    let value =
        Value::try_from(value).chain_err(|| "Unable to represent the item as a TOML Value")?;
    if index.starts_with("book.") {
        // Keys under `book.` live in the strongly-typed `BookConfig`.
        self.book.update_value(&index[5..], value);
    } else if index.starts_with("build.") {
        // Keys under `build.` live in the strongly-typed `BuildConfig`.
        self.build.update_value(&index[6..], value);
    } else {
        // Everything else goes into the free-form `rest` table.
        self.rest
            .insert(index, value)
            .map_err(ErrorKind::TomlQueryError)?;
    }
    Ok(())
}
/// Get the table associated with a particular renderer.
pub fn get_renderer<I: AsRef<str>>(&self, index: I) -> Option<&Table> {
let key = format!("output.{}", index.as_ref());
self.get(&key).and_then(Value::as_table)
}
/// Get the table associated with a particular preprocessor.
pub fn get_preprocessor<I: AsRef<str>>(&self, index: I) -> Option<&Table> {
    // Preprocessor settings live under `preprocessor.<name>`.
    self.get(&format!("preprocessor.{}", index.as_ref()))
        .and_then(Value::as_table)
}
/// Convert a configuration file written in the old, flat format into the
/// current `Config` layout (top-level metadata moves under `[book]`, and
/// `output.html.destination` becomes `build.build-dir`).
fn from_legacy(mut table: Value) -> Config {
    let mut cfg = Config::default();
    // we use a macro here instead of a normal loop because the $out
    // variable can be different types. This way we can make type inference
    // figure out what try_into() deserializes to.
    macro_rules! get_and_insert {
        ($table:expr, $key:expr => $out:expr) => {
            // Remove the key from the table (if present) and, when it
            // deserializes cleanly, store it in the destination field;
            // otherwise the field keeps its default.
            let got = $table
                .as_table_mut()
                .and_then(|t| t.remove($key))
                .and_then(|v| v.try_into().ok());
            if let Some(value) = got {
                $out = value;
            }
        };
    }
    // Top-level metadata entries migrate into the `book` table.
    get_and_insert!(table, "title" => cfg.book.title);
    get_and_insert!(table, "authors" => cfg.book.authors);
    get_and_insert!(table, "source" => cfg.book.src);
    get_and_insert!(table, "description" => cfg.book.description);
    // The old html output destination is now the build directory.
    if let Ok(Some(dest)) = table.delete("output.html.destination") {
        if let Ok(destination) = dest.try_into() {
            cfg.build.build_dir = destination;
        }
    }
    // Whatever remains is kept verbatim for renderers/preprocessors.
    cfg.rest = table;
    cfg
}
}
impl Default for Config {
    fn default() -> Config {
        // Default book/build settings with an empty table for everything
        // else.
        Config {
            book: Default::default(),
            build: Default::default(),
            rest: Value::Table(Table::default()),
        }
    }
}
impl<'de> Deserialize<'de> for Config {
    fn deserialize<D: Deserializer<'de>>(de: D) -> std::result::Result<Self, D::Error> {
        let raw = Value::deserialize(de)?;
        // Old-style (flat) config files are converted on the fly, after
        // telling the user how to migrate.
        if is_legacy_format(&raw) {
            warn!("It looks like you are using the legacy book.toml format.");
            warn!("We'll parse it for now, but you should probably convert to the new format.");
            warn!("See the mdbook documentation for more details, although as a rule of thumb");
            warn!("just move all top level configuration entries like `title`, `author` and");
            warn!("`description` under a table called `[book]`, move the `destination` entry");
            warn!("from `[output.html]`, renamed to `build-dir`, under a table called");
            warn!("`[build]`, and it should all work.");
            warn!("Documentation: http://rust-lang.github.io/mdBook/format/config.html");
            return Ok(Config::from_legacy(raw));
        }
        let mut table = if let Value::Table(t) = raw {
            t
        } else {
            use serde::de::Error;
            return Err(D::Error::custom(
                "A config file should always be a toml table",
            ));
        };
        // Pull out the well-known tables; anything missing (or failing to
        // deserialize) falls back to its default.
        Ok(Config {
            book: table
                .remove("book")
                .and_then(|value| value.try_into().ok())
                .unwrap_or_default(),
            build: table
                .remove("build")
                .and_then(|value| value.try_into().ok())
                .unwrap_or_default(),
            rest: Value::Table(table),
        })
    }
}
impl Serialize for Config {
    fn serialize<S: Serializer>(&self, s: S) -> std::result::Result<S::Ok, S::Error> {
        use serde::ser::Error;
        // Re-attach the strongly-typed `book` table to the free-form rest of
        // the config before serializing the whole thing.
        let book_config = Value::try_from(self.book.clone())
            .map_err(|_| S::Error::custom("Unable to serialize the BookConfig"))?;
        let mut table = self.rest.clone();
        table.insert("book", book_config).expect("unreachable");
        table.serialize(s)
    }
}
/// Convert a `MDBOOK_*` environment variable name into the dotted,
/// lower-case config key it corresponds to.
///
/// Returns `None` for variables without the `MDBOOK_` prefix. Double
/// underscores (`__`) become dots (nested keys) and single underscores
/// become dashes.
fn parse_env(key: &str) -> Option<String> {
    const PREFIX: &str = "MDBOOK_";
    if !key.starts_with(PREFIX) {
        return None;
    }
    let stripped = &key[PREFIX.len()..];
    Some(stripped.to_lowercase().replace("__", ".").replace("_", "-"))
}
/// Returns `true` when the parsed TOML uses the old, flat configuration
/// layout (metadata at the top level instead of under `[book]`).
fn is_legacy_format(table: &Value) -> bool {
    // Any of these keys only ever appeared in the legacy layout.
    const LEGACY_ITEMS: [&str; 5] = [
        "title",
        "authors",
        "source",
        "description",
        "output.html.destination",
    ];
    LEGACY_ITEMS.iter().any(|item| match table.read(item) {
        Ok(Some(_)) => true,
        _ => false,
    })
}
/// Configuration options which are specific to the book and required for
/// loading it from disk.
///
/// Defaults are supplied by the `Default` impl below.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case")]
pub struct BookConfig {
    /// The book's title.
    pub title: Option<String>,
    /// The book's authors.
    pub authors: Vec<String>,
    /// An optional description for the book.
    pub description: Option<String>,
    /// Location of the book source relative to the book's root directory.
    /// Defaults to `src`.
    pub src: PathBuf,
    /// Does this book support more than one language?
    pub multilingual: bool,
    /// The main language of the book. Defaults to `en`.
    pub language: Option<String>,
}
impl Default for BookConfig {
    fn default() -> BookConfig {
        // A book with no metadata, sources under `src/`, written in English.
        BookConfig {
            title: None,
            description: None,
            authors: Vec::new(),
            src: PathBuf::from("src"),
            multilingual: false,
            language: Some("en".to_string()),
        }
    }
}
/// Configuration for the build procedure.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case")]
pub struct BuildConfig {
    /// Where to put built artefacts relative to the book's root directory.
    pub build_dir: PathBuf,
    /// Should non-existent markdown files specified in `SUMMARY.md` be created
    /// if they don't exist?
    pub create_missing: bool,
    /// Should the default preprocessors always be used when they are
    /// compatible with the renderer?
    pub use_default_preprocessors: bool,
}
impl Default for BuildConfig {
fn default() -> BuildConfig {
BuildConfig {
build_dir: PathBuf::from("book"),
create_missing: true,
use_default_preprocessors: true,
}
}
}
/// Configuration for the HTML renderer.
#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case")]
pub struct HtmlConfig {
    /// The theme directory, if specified.
    pub theme: Option<PathBuf>,
    /// The default theme to use, defaults to 'light'.
    pub default_theme: Option<String>,
    /// The theme to use if the browser requests the dark version of the site.
    /// Defaults to the same as 'default_theme'.
    pub preferred_dark_theme: Option<String>,
    /// Use "smart quotes" instead of the usual `"` character.
    pub curly_quotes: bool,
    /// Should mathjax be enabled?
    pub mathjax_support: bool,
    /// An optional google analytics code.
    pub google_analytics: Option<String>,
    /// Additional CSS stylesheets to include in the rendered page's `<head>`.
    pub additional_css: Vec<PathBuf>,
    /// Additional JS scripts to include at the bottom of the rendered page's
    /// `<body>`.
    pub additional_js: Vec<PathBuf>,
    /// Fold settings.
    pub fold: Fold,
    /// Playpen settings.
    pub playpen: Playpen,
    /// Don't render section labels.
    pub no_section_label: bool,
    /// Search settings. If `None`, the default will be used.
    pub search: Option<Search>,
    /// Git repository url. If `None`, the git button will not be shown.
    pub git_repository_url: Option<String>,
    /// FontAwesome icon class to use for the Git repository link.
    /// Defaults to `fa-github` if `None`.
    pub git_repository_icon: Option<String>,
    /// This is used as a bit of a workaround for the `mdbook serve` command.
    /// Basically, because you set the websocket port from the command line, the
    /// `mdbook serve` command needs a way to let the HTML renderer know where
    /// to point livereloading at, if it has been enabled.
    ///
    /// This config item *should not be edited* by the end user.
    #[doc(hidden)]
    pub livereload_url: Option<String>,
}
impl HtmlConfig {
/// Returns the directory of theme from the provided root directory. If the
/// directory is not present it will append the default directory of "theme"
pub fn theme_dir(&self, root: &PathBuf) -> PathBuf {
match self.theme {
Some(ref d) => root.join(d),
None => root.join("theme"),
}
}
}
/// Configuration for how to fold chapters of the sidebar.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case")]
pub struct Fold {
    /// When off, all folds are open. Default: `false`.
    pub enable: bool,
    /// The higher the more folded regions are open. When level is 0, all folds
    /// are closed.
    /// Default: `0`.
    pub level: u8,
}
/// Configuration for tweaking how the HTML renderer handles the playpen.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case")]
pub struct Playpen {
    /// Should playpen snippets be editable? Default: `false`.
    pub editable: bool,
    /// Display the copy button. Default: `true`.
    pub copyable: bool,
    /// Copy JavaScript files for the editor to the output directory?
    /// Default: `true`.
    pub copy_js: bool,
    /// Display line numbers on playpen snippets. Default: `false`.
    pub line_numbers: bool,
}
impl Default for Playpen {
    fn default() -> Playpen {
        // Read-only snippets with a copy button and bundled editor JS.
        Playpen {
            copyable: true,
            copy_js: true,
            editable: false,
            line_numbers: false,
        }
    }
}
/// Configuration of the search functionality of the HTML renderer.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case")]
pub struct Search {
    /// Enable the search feature. Default: `true`.
    pub enable: bool,
    /// Maximum number of visible results. Default: `30`.
    pub limit_results: u32,
    /// The number of words used for a search result teaser. Default: `30`.
    pub teaser_word_count: u32,
    /// Define the logical link between multiple search words.
    /// If true, all search words must appear in each result. Default: `false`.
    pub use_boolean_and: bool,
    /// Boost factor for the search result score if a search word appears in the header.
    /// Default: `2`.
    pub boost_title: u8,
    /// Boost factor for the search result score if a search word appears in the hierarchy.
    /// The hierarchy contains all titles of the parent documents and all parent headings.
    /// Default: `1`.
    pub boost_hierarchy: u8,
    /// Boost factor for the search result score if a search word appears in the text.
    /// Default: `1`.
    pub boost_paragraph: u8,
    /// True if the searchword `micro` should match `microwave`. Default: `true`.
    pub expand: bool,
    /// Documents are split into smaller parts, separated by headings. This defines, until which
    /// level of heading documents should be split. Default: `3`. (`### This is a level 3 heading`)
    pub heading_split_level: u8,
    /// Copy JavaScript files for the search functionality to the output directory?
    /// Default: `true`.
    pub copy_js: bool,
}
impl Default for Search {
    fn default() -> Search {
        // Please update the documentation of `Search` when changing values!
        Search {
            enable: true,
            expand: true,
            copy_js: true,
            use_boolean_and: false,
            limit_results: 30,
            teaser_word_count: 30,
            boost_title: 2,
            boost_hierarchy: 1,
            boost_paragraph: 1,
            heading_split_level: 3,
        }
    }
}
/// Allows you to "update" any arbitrary field in a struct by round-tripping via
/// a `toml::Value`.
///
/// This is definitely not the most performant way to do things, which means you
/// should probably keep it away from tight loops...
trait Updateable<'de>: Serialize + Deserialize<'de> {
    fn update_value<S: Serialize>(&mut self, key: &str, value: S) {
        // Serializing `self` is assumed infallible for the config types this
        // is used with.
        let mut raw = Value::try_from(&self).expect("unreachable");
        if let Ok(value) = Value::try_from(value) {
            let _ = raw.insert(key, value);
            // If the updated table no longer deserializes, `self` is left
            // untouched.
            if let Ok(updated) = raw.try_into() {
                *self = updated;
            }
        }
    }
}
impl<'de, T> Updateable<'de> for T where T: Serialize + Deserialize<'de> {}
#[cfg(test)]
mod tests {
    use super::*;
    // A config exercising every strongly-typed table plus arbitrary
    // renderer/preprocessor sections.
    const COMPLEX_CONFIG: &str = r#"
[book]
title = "Some Book"
authors = ["Michael-F-Bryan <michaelfbryan@gmail.com>"]
description = "A completely useless book"
multilingual = true
src = "source"
language = "ja"
[build]
build-dir = "outputs"
create-missing = false
use-default-preprocessors = true
[output.html]
theme = "./themedir"
default-theme = "rust"
curly-quotes = true
google-analytics = "123456"
additional-css = ["./foo/bar/baz.css"]
git-repository-url = "https://foo.com/"
git-repository-icon = "fa-code-fork"
[output.html.playpen]
editable = true
editor = "ace"
[preprocessor.first]
[preprocessor.second]
"#;
    #[test]
    fn load_a_complex_config_file() {
        let src = COMPLEX_CONFIG;
        let book_should_be = BookConfig {
            title: Some(String::from("Some Book")),
            authors: vec![String::from("Michael-F-Bryan <michaelfbryan@gmail.com>")],
            description: Some(String::from("A completely useless book")),
            multilingual: true,
            src: PathBuf::from("source"),
            language: Some(String::from("ja")),
        };
        let build_should_be = BuildConfig {
            build_dir: PathBuf::from("outputs"),
            create_missing: false,
            use_default_preprocessors: true,
        };
        let playpen_should_be = Playpen {
            editable: true,
            copyable: true,
            copy_js: true,
            line_numbers: false,
        };
        let html_should_be = HtmlConfig {
            curly_quotes: true,
            google_analytics: Some(String::from("123456")),
            additional_css: vec![PathBuf::from("./foo/bar/baz.css")],
            theme: Some(PathBuf::from("./themedir")),
            default_theme: Some(String::from("rust")),
            playpen: playpen_should_be,
            git_repository_url: Some(String::from("https://foo.com/")),
            git_repository_icon: Some(String::from("fa-code-fork")),
            ..Default::default()
        };
        let got = Config::from_str(src).unwrap();
        assert_eq!(got.book, book_should_be);
        assert_eq!(got.build, build_should_be);
        assert_eq!(got.html_config().unwrap(), html_should_be);
    }
    // Sections outside the well-known tables must be deserializable into
    // arbitrary user-defined types.
    #[test]
    fn load_arbitrary_output_type() {
        #[derive(Debug, Deserialize, PartialEq)]
        struct RandomOutput {
            foo: u32,
            bar: String,
            baz: Vec<bool>,
        }
        let src = r#"
[output.random]
foo = 5
bar = "Hello World"
baz = [true, true, false]
"#;
        let should_be = RandomOutput {
            foo: 5,
            bar: String::from("Hello World"),
            baz: vec![true, true, false],
        };
        let cfg = Config::from_str(src).unwrap();
        let got: RandomOutput = cfg.get_deserialized_opt("output.random").unwrap().unwrap();
        assert_eq!(got, should_be);
        let got_baz: Vec<bool> = cfg
            .get_deserialized_opt("output.random.baz")
            .unwrap()
            .unwrap();
        let baz_should_be = vec![true, true, false];
        assert_eq!(got_baz, baz_should_be);
    }
    #[test]
    fn mutate_some_stuff() {
        // really this is just a sanity check to make sure the borrow checker
        // is happy...
        let src = COMPLEX_CONFIG;
        let mut config = Config::from_str(src).unwrap();
        let key = "output.html.playpen.editable";
        assert_eq!(config.get(key).unwrap(), &Value::Boolean(true));
        *config.get_mut(key).unwrap() = Value::Boolean(false);
        assert_eq!(config.get(key).unwrap(), &Value::Boolean(false));
    }
    /// The config file format has slightly changed (metadata stuff is now under
    /// the `book` table instead of being at the top level) so we're adding a
    /// **temporary** compatibility check. You should be able to still load the
    /// old format, emitting a warning.
    #[test]
    fn can_still_load_the_previous_format() {
        let src = r#"
title = "mdBook Documentation"
description = "Create book from markdown files. Like Gitbook but implemented in Rust"
authors = ["Mathieu David"]
source = "./source"
[output.html]
destination = "my-book" # the output files will be generated in `root/my-book` instead of `root/book`
theme = "my-theme"
curly-quotes = true
google-analytics = "123456"
additional-css = ["custom.css", "custom2.css"]
additional-js = ["custom.js"]
"#;
        let book_should_be = BookConfig {
            title: Some(String::from("mdBook Documentation")),
            description: Some(String::from(
                "Create book from markdown files. Like Gitbook but implemented in Rust",
            )),
            authors: vec![String::from("Mathieu David")],
            src: PathBuf::from("./source"),
            ..Default::default()
        };
        let build_should_be = BuildConfig {
            build_dir: PathBuf::from("my-book"),
            create_missing: true,
            use_default_preprocessors: true,
        };
        let html_should_be = HtmlConfig {
            theme: Some(PathBuf::from("my-theme")),
            curly_quotes: true,
            google_analytics: Some(String::from("123456")),
            additional_css: vec![PathBuf::from("custom.css"), PathBuf::from("custom2.css")],
            additional_js: vec![PathBuf::from("custom.js")],
            ..Default::default()
        };
        let got = Config::from_str(src).unwrap();
        assert_eq!(got.book, book_should_be);
        assert_eq!(got.build, build_should_be);
        assert_eq!(got.html_config().unwrap(), html_should_be);
    }
    #[test]
    fn set_a_config_item() {
        let mut cfg = Config::default();
        let key = "foo.bar.baz";
        let value = "Something Interesting";
        assert!(cfg.get(key).is_none());
        cfg.set(key, value).unwrap();
        let got: String = cfg.get_deserialized_opt(key).unwrap().unwrap();
        assert_eq!(got, value);
    }
    #[test]
    fn parse_env_vars() {
        let inputs = vec![
            ("FOO", None),
            ("MDBOOK_foo", Some("foo")),
            ("MDBOOK_FOO__bar__baz", Some("foo.bar.baz")),
            ("MDBOOK_FOO_bar__baz", Some("foo-bar.baz")),
        ];
        for (src, should_be) in inputs {
            let got = parse_env(src);
            let should_be = should_be.map(ToString::to_string);
            assert_eq!(got, should_be);
        }
    }
    // Inverse of `parse_env`: turn a dotted config key into the corresponding
    // environment variable name.
    fn encode_env_var(key: &str) -> String {
        format!(
            "MDBOOK_{}",
            key.to_uppercase().replace('.', "__").replace("-", "_")
        )
    }
    // NOTE: the env-var tests below mutate process-global state via
    // `env::set_var`, so they would interfere if the key names overlapped.
    #[test]
    fn update_config_using_env_var() {
        let mut cfg = Config::default();
        let key = "foo.bar";
        let value = "baz";
        assert!(cfg.get(key).is_none());
        let encoded_key = encode_env_var(key);
        env::set_var(encoded_key, value);
        cfg.update_from_env();
        assert_eq!(
            cfg.get_deserialized_opt::<String, _>(key).unwrap().unwrap(),
            value
        );
    }
    #[test]
    #[allow(clippy::approx_constant)]
    fn update_config_using_env_var_and_complex_value() {
        let mut cfg = Config::default();
        let key = "foo-bar.baz";
        let value = json!({"array": [1, 2, 3], "number": 3.14});
        let value_str = serde_json::to_string(&value).unwrap();
        assert!(cfg.get(key).is_none());
        let encoded_key = encode_env_var(key);
        env::set_var(encoded_key, value_str);
        cfg.update_from_env();
        assert_eq!(
            cfg.get_deserialized_opt::<serde_json::Value, _>(key)
                .unwrap()
                .unwrap(),
            value
        );
    }
    #[test]
    fn update_book_title_via_env() {
        let mut cfg = Config::default();
        let should_be = "Something else".to_string();
        assert_ne!(cfg.book.title, Some(should_be.clone()));
        env::set_var("MDBOOK_BOOK__TITLE", &should_be);
        cfg.update_from_env();
        assert_eq!(cfg.book.title, Some(should_be));
    }
}
Fix doc comment of `BuildConfig::create_missing` (#1104)
//! Mdbook's configuration system.
//!
//! The main entrypoint of the `config` module is the `Config` struct. This acts
//! essentially as a bag of configuration information, with a couple
//! pre-determined tables (`BookConfig` and `BuildConfig`) as well as support
//! for arbitrary data which is exposed to plugins and alternative backends.
//!
//!
//! # Examples
//!
//! ```rust
//! # use mdbook::errors::*;
//! use std::path::PathBuf;
//! use std::str::FromStr;
//! use mdbook::Config;
//! use toml::Value;
//!
//! # fn run() -> Result<()> {
//! let src = r#"
//! [book]
//! title = "My Book"
//! authors = ["Michael-F-Bryan"]
//!
//! [build]
//! build-dir = "out"
//!
//! [other-table.foo]
//! bar = 123
//! "#;
//!
//! // load the `Config` from a toml string
//! let mut cfg = Config::from_str(src)?;
//!
//! // retrieve a nested value
//! let bar = cfg.get("other-table.foo.bar").cloned();
//! assert_eq!(bar, Some(Value::Integer(123)));
//!
//! // Set the `output.html.theme` directory
//! assert!(cfg.get("output.html").is_none());
//! cfg.set("output.html.theme", "./themes");
//!
//! // then load it again, automatically deserializing to a `PathBuf`.
//! let got: Option<PathBuf> = cfg.get_deserialized_opt("output.html.theme")?;
//! assert_eq!(got, Some(PathBuf::from("./themes")));
//! # Ok(())
//! # }
//! # fn main() { run().unwrap() }
//! ```
#![deny(missing_docs)]
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::env;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use toml::value::Table;
use toml::{self, Value};
use toml_query::delete::TomlValueDeleteExt;
use toml_query::insert::TomlValueInsertExt;
use toml_query::read::TomlValueReadExt;
use crate::errors::*;
use crate::utils;
/// The overall configuration object for MDBook, essentially an in-memory
/// representation of `book.toml`.
#[derive(Debug, Clone, PartialEq)]
pub struct Config {
    /// Metadata about the book.
    pub book: BookConfig,
    /// Information about the build environment.
    pub build: BuildConfig,
    // Free-form storage for everything else (renderer, preprocessor and
    // plugin tables), accessed via `get`/`get_mut`/`set`.
    rest: Value,
}
impl FromStr for Config {
    type Err = Error;
    /// Load a `Config` from some string.
    fn from_str(src: &str) -> Result<Self> {
        // Delegates to the custom `Deserialize` impl below, which also
        // transparently handles the legacy flat format.
        toml::from_str(src).chain_err(|| Error::from("Invalid configuration file"))
    }
}
impl Config {
    /// Load the configuration file from disk.
    pub fn from_disk<P: AsRef<Path>>(config_file: P) -> Result<Config> {
        let mut buffer = String::new();
        // Read the whole file, then parse it via `FromStr`.
        File::open(config_file)
            .chain_err(|| "Unable to open the configuration file")?
            .read_to_string(&mut buffer)
            .chain_err(|| "Couldn't read the file")?;
        Config::from_str(&buffer)
    }
    /// Updates the `Config` from the available environment variables.
    ///
    /// Variables starting with `MDBOOK_` are used for configuration. The key is
    /// created by removing the `MDBOOK_` prefix and turning the resulting
    /// string into `kebab-case`. Double underscores (`__`) separate nested
    /// keys, while a single underscore (`_`) is replaced with a dash (`-`).
    ///
    /// For example:
    ///
    /// - `MDBOOK_foo` -> `foo`
    /// - `MDBOOK_FOO` -> `foo`
    /// - `MDBOOK_FOO__BAR` -> `foo.bar`
    /// - `MDBOOK_FOO_BAR` -> `foo-bar`
    /// - `MDBOOK_FOO_bar__baz` -> `foo-bar.baz`
    ///
    /// So by setting the `MDBOOK_BOOK__TITLE` environment variable you can
    /// override the book's title without needing to touch your `book.toml`.
    ///
    /// > **Note:** To facilitate setting more complex config items, the value
    /// > of an environment variable is first parsed as JSON, falling back to a
    /// > string if the parse fails.
    /// >
    /// > This means, if you so desired, you could override all book metadata
    /// > when building the book with something like
    /// >
    /// > ```text
    /// > $ export MDBOOK_BOOK="{'title': 'My Awesome Book', authors: ['Michael-F-Bryan']}"
    /// > $ mdbook build
    /// > ```
    ///
    /// The latter case may be useful in situations where `mdbook` is invoked
    /// from a script or CI, where it sometimes isn't possible to update the
    /// `book.toml` before building.
    pub fn update_from_env(&mut self) {
        debug!("Updating the config from environment variables");
        // Keep only the `MDBOOK_*` variables, converted to dotted keys.
        let overrides =
            env::vars().filter_map(|(key, value)| parse_env(&key).map(|index| (index, value)));
        for (key, value) in overrides {
            trace!("{} => {}", key, value);
            // JSON first so arrays/tables/numbers work; fall back to string.
            let parsed_value = serde_json::from_str(&value)
                .unwrap_or_else(|_| serde_json::Value::String(value.to_string()));
            self.set(key, parsed_value).expect("unreachable");
        }
    }
    /// Fetch an arbitrary item from the `Config` as a `toml::Value`.
    ///
    /// You can use dotted indices to access nested items (e.g.
    /// `output.html.playpen` will fetch the "playpen" out of the html output
    /// table).
    pub fn get(&self, key: &str) -> Option<&Value> {
        self.rest.read(key).unwrap_or(None)
    }
    /// Fetch a value from the `Config` so you can mutate it.
    pub fn get_mut(&mut self, key: &str) -> Option<&mut Value> {
        // Lookup errors are treated the same as an absent key.
        match self.rest.read_mut(key) {
            Ok(inner) => inner,
            Err(_) => None,
        }
    }
    /// Convenience method for getting the html renderer's configuration.
    ///
    /// # Note
    ///
    /// This is for compatibility only. It will be removed completely once the
    /// HTML renderer is refactored to be less coupled to `mdbook` internals.
    #[doc(hidden)]
    pub fn html_config(&self) -> Option<HtmlConfig> {
        match self.get_deserialized_opt("output.html") {
            Ok(Some(config)) => Some(config),
            Ok(None) => None,
            // A malformed `[output.html]` table is logged and swallowed.
            Err(e) => {
                utils::log_backtrace(&e.chain_err(|| "Parsing configuration [output.html]"));
                None
            }
        }
    }
    /// Deprecated, use get_deserialized_opt instead.
    #[deprecated = "use get_deserialized_opt instead"]
    pub fn get_deserialized<'de, T: Deserialize<'de>, S: AsRef<str>>(&self, name: S) -> Result<T> {
        let name = name.as_ref();
        // Unlike `get_deserialized_opt`, an absent key is an error here.
        match self.get_deserialized_opt(name)? {
            Some(value) => Ok(value),
            None => bail!("Key not found, {:?}", name),
        }
    }
    /// Convenience function to fetch a value from the config and deserialize it
    /// into some arbitrary type.
    ///
    /// Returns `Ok(None)` when the key is absent; errors only when the value
    /// exists but doesn't deserialize into `T`.
    pub fn get_deserialized_opt<'de, T: Deserialize<'de>, S: AsRef<str>>(
        &self,
        name: S,
    ) -> Result<Option<T>> {
        let name = name.as_ref();
        self.get(name)
            .map(|value| {
                value
                    .clone()
                    .try_into()
                    .chain_err(|| "Couldn't deserialize the value")
            })
            .transpose()
    }
    /// Set a config key, clobbering any existing values along the way.
    ///
    /// The only way this can fail is if we can't serialize `value` into a
    /// `toml::Value`.
    pub fn set<S: Serialize, I: AsRef<str>>(&mut self, index: I, value: S) -> Result<()> {
        let index = index.as_ref();
        let value =
            Value::try_from(value).chain_err(|| "Unable to represent the item as a JSON Value")?;
        // `book.` and `build.` keys are routed to the strongly-typed tables;
        // everything else lands in the free-form `rest` table.
        if index.starts_with("book.") {
            self.book.update_value(&index[5..], value);
        } else if index.starts_with("build.") {
            self.build.update_value(&index[6..], value);
        } else {
            self.rest
                .insert(index, value)
                .map_err(ErrorKind::TomlQueryError)?;
        }
        Ok(())
    }
    /// Get the table associated with a particular renderer.
    pub fn get_renderer<I: AsRef<str>>(&self, index: I) -> Option<&Table> {
        let key = format!("output.{}", index.as_ref());
        self.get(&key).and_then(Value::as_table)
    }
    /// Get the table associated with a particular preprocessor.
    pub fn get_preprocessor<I: AsRef<str>>(&self, index: I) -> Option<&Table> {
        let key = format!("preprocessor.{}", index.as_ref());
        self.get(&key).and_then(Value::as_table)
    }
    /// Convert a legacy (flat) configuration into the current layout.
    fn from_legacy(mut table: Value) -> Config {
        let mut cfg = Config::default();
        // we use a macro here instead of a normal loop because the $out
        // variable can be different types. This way we can make type inference
        // figure out what try_into() deserializes to.
        macro_rules! get_and_insert {
            ($table:expr, $key:expr => $out:expr) => {
                let got = $table
                    .as_table_mut()
                    .and_then(|t| t.remove($key))
                    .and_then(|v| v.try_into().ok());
                if let Some(value) = got {
                    $out = value;
                }
            };
        }
        // Top-level metadata entries migrate into the `book` table.
        get_and_insert!(table, "title" => cfg.book.title);
        get_and_insert!(table, "authors" => cfg.book.authors);
        get_and_insert!(table, "source" => cfg.book.src);
        get_and_insert!(table, "description" => cfg.book.description);
        // The old html output destination is now the build directory.
        if let Ok(Some(dest)) = table.delete("output.html.destination") {
            if let Ok(destination) = dest.try_into() {
                cfg.build.build_dir = destination;
            }
        }
        // Whatever remains is kept verbatim for renderers/preprocessors.
        cfg.rest = table;
        cfg
    }
}
impl Default for Config {
    fn default() -> Config {
        // Default book/build settings with an empty table for everything
        // else.
        Config {
            book: Default::default(),
            build: Default::default(),
            rest: Value::Table(Table::default()),
        }
    }
}
impl<'de> Deserialize<'de> for Config {
    fn deserialize<D: Deserializer<'de>>(de: D) -> std::result::Result<Self, D::Error> {
        let raw = Value::deserialize(de)?;
        // Old-style (flat) config files are converted on the fly, after
        // telling the user how to migrate.
        if is_legacy_format(&raw) {
            warn!("It looks like you are using the legacy book.toml format.");
            warn!("We'll parse it for now, but you should probably convert to the new format.");
            warn!("See the mdbook documentation for more details, although as a rule of thumb");
            warn!("just move all top level configuration entries like `title`, `author` and");
            warn!("`description` under a table called `[book]`, move the `destination` entry");
            warn!("from `[output.html]`, renamed to `build-dir`, under a table called");
            warn!("`[build]`, and it should all work.");
            warn!("Documentation: http://rust-lang.github.io/mdBook/format/config.html");
            return Ok(Config::from_legacy(raw));
        }
        let mut table = if let Value::Table(t) = raw {
            t
        } else {
            use serde::de::Error;
            return Err(D::Error::custom(
                "A config file should always be a toml table",
            ));
        };
        // Pull out the well-known tables; anything missing (or failing to
        // deserialize) falls back to its default.
        Ok(Config {
            book: table
                .remove("book")
                .and_then(|value| value.try_into().ok())
                .unwrap_or_default(),
            build: table
                .remove("build")
                .and_then(|value| value.try_into().ok())
                .unwrap_or_default(),
            rest: Value::Table(table),
        })
    }
}
impl Serialize for Config {
    fn serialize<S: Serializer>(&self, s: S) -> std::result::Result<S::Ok, S::Error> {
        use serde::ser::Error;
        // Re-attach the strongly-typed `book` table to the free-form rest of
        // the config before serializing the whole thing.
        let book_config = Value::try_from(self.book.clone())
            .map_err(|_| S::Error::custom("Unable to serialize the BookConfig"))?;
        let mut table = self.rest.clone();
        table.insert("book", book_config).expect("unreachable");
        table.serialize(s)
    }
}
/// Convert a `MDBOOK_*` environment variable name into the dotted,
/// lower-case config key it corresponds to.
///
/// Returns `None` for variables without the `MDBOOK_` prefix. Double
/// underscores (`__`) become dots (nested keys) and single underscores
/// become dashes.
fn parse_env(key: &str) -> Option<String> {
    const PREFIX: &str = "MDBOOK_";
    if !key.starts_with(PREFIX) {
        return None;
    }
    let stripped = &key[PREFIX.len()..];
    Some(stripped.to_lowercase().replace("__", ".").replace("_", "-"))
}
/// Returns `true` when the parsed TOML uses the old, flat configuration
/// layout (metadata at the top level instead of under `[book]`).
fn is_legacy_format(table: &Value) -> bool {
    // Any of these keys only ever appeared in the legacy layout.
    const LEGACY_ITEMS: [&str; 5] = [
        "title",
        "authors",
        "source",
        "description",
        "output.html.destination",
    ];
    LEGACY_ITEMS.iter().any(|item| match table.read(item) {
        Ok(Some(_)) => true,
        _ => false,
    })
}
/// Configuration options which are specific to the book and required for
/// loading it from disk.
///
/// Defaults are supplied by the `Default` impl below.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case")]
pub struct BookConfig {
    /// The book's title.
    pub title: Option<String>,
    /// The book's authors.
    pub authors: Vec<String>,
    /// An optional description for the book.
    pub description: Option<String>,
    /// Location of the book source relative to the book's root directory.
    /// Defaults to `src`.
    pub src: PathBuf,
    /// Does this book support more than one language?
    pub multilingual: bool,
    /// The main language of the book. Defaults to `en`.
    pub language: Option<String>,
}
impl Default for BookConfig {
    fn default() -> BookConfig {
        // A book with no metadata, sources under `src/`, written in English.
        BookConfig {
            title: None,
            description: None,
            authors: Vec::new(),
            src: PathBuf::from("src"),
            multilingual: false,
            language: Some("en".to_string()),
        }
    }
}
/// Configuration for the build procedure.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case")]
pub struct BuildConfig {
    /// Where to put built artefacts relative to the book's root directory.
    /// Defaults to `book`.
    pub build_dir: PathBuf,
    /// Should non-existent markdown files specified in `SUMMARY.md` be created
    /// if they don't exist?
    pub create_missing: bool,
    /// Should the default preprocessors always be used when they are
    /// compatible with the renderer?
    pub use_default_preprocessors: bool,
}
impl Default for BuildConfig {
fn default() -> BuildConfig {
BuildConfig {
build_dir: PathBuf::from("book"),
create_missing: true,
use_default_preprocessors: true,
}
}
}
/// Configuration for the HTML renderer.
#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case")]
pub struct HtmlConfig {
    /// The theme directory, if specified.
    pub theme: Option<PathBuf>,
    /// The default theme to use, defaults to 'light'.
    pub default_theme: Option<String>,
    /// The theme to use if the browser requests the dark version of the site.
    /// Defaults to the same as 'default_theme'.
    pub preferred_dark_theme: Option<String>,
    /// Use "smart quotes" instead of the usual `"` character.
    pub curly_quotes: bool,
    /// Should mathjax be enabled?
    pub mathjax_support: bool,
    /// An optional google analytics code.
    pub google_analytics: Option<String>,
    /// Additional CSS stylesheets to include in the rendered page's `<head>`.
    pub additional_css: Vec<PathBuf>,
    /// Additional JS scripts to include at the bottom of the rendered page's
    /// `<body>`.
    pub additional_js: Vec<PathBuf>,
    /// Fold settings.
    pub fold: Fold,
    /// Playpen settings.
    pub playpen: Playpen,
    /// Don't render section labels.
    pub no_section_label: bool,
    /// Search settings. If `None`, the default will be used.
    pub search: Option<Search>,
    /// Git repository url. If `None`, the git button will not be shown.
    pub git_repository_url: Option<String>,
    /// FontAwesome icon class to use for the Git repository link.
    /// Defaults to `fa-github` if `None`.
    pub git_repository_icon: Option<String>,
    /// This is used as a bit of a workaround for the `mdbook serve` command.
    /// Basically, because you set the websocket port from the command line, the
    /// `mdbook serve` command needs a way to let the HTML renderer know where
    /// to point livereloading at, if it has been enabled.
    ///
    /// This config item *should not be edited* by the end user.
    #[doc(hidden)]
    pub livereload_url: Option<String>,
}
impl HtmlConfig {
    /// Resolve the theme directory relative to the provided `root`.
    ///
    /// Falls back to the conventional `"theme"` subdirectory when no theme
    /// directory was configured.
    // NOTE(review): `&Path` would be the more idiomatic parameter type here;
    // kept as `&PathBuf` to preserve the existing signature.
    pub fn theme_dir(&self, root: &PathBuf) -> PathBuf {
        if let Some(ref custom) = self.theme {
            root.join(custom)
        } else {
            root.join("theme")
        }
    }
}
/// Configuration for how to fold chapters of sidebar.
///
/// Populated from the `fold` key of the HTML configuration; missing keys use
/// the derived `Default` (all zero/false).
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case")]
pub struct Fold {
    /// When off, all folds are open. Default: `false`.
    pub enable: bool,
    /// The higher the more folded regions are open. When level is 0, all folds
    /// are closed.
    /// Default: `0`.
    pub level: u8,
}
/// Configuration for tweaking how the the HTML renderer handles the playpen.
///
/// Defaults documented per-field are enforced by the `Default` impl below —
/// keep the two in sync.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case")]
pub struct Playpen {
    /// Should playpen snippets be editable? Default: `false`.
    pub editable: bool,
    /// Display the copy button. Default: `true`.
    pub copyable: bool,
    /// Copy JavaScript files for the editor to the output directory?
    /// Default: `true`.
    pub copy_js: bool,
    /// Display line numbers on playpen snippets. Default: `false`.
    pub line_numbers: bool,
}
impl Default for Playpen {
    /// Defaults: read-only snippets with a copy button, editor assets copied,
    /// no line numbers. Keep in sync with the doc comments on `Playpen`.
    fn default() -> Playpen {
        Playpen {
            copyable: true,
            copy_js: true,
            editable: false,
            line_numbers: false,
        }
    }
}
/// Configuration of the search functionality of the HTML renderer.
///
/// Defaults documented per-field are enforced by the `Default` impl below —
/// keep the two in sync.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case")]
pub struct Search {
    /// Enable the search feature. Default: `true`.
    pub enable: bool,
    /// Maximum number of visible results. Default: `30`.
    pub limit_results: u32,
    /// The number of words used for a search result teaser. Default: `30`.
    pub teaser_word_count: u32,
    /// Define the logical link between multiple search words.
    /// If true, all search words must appear in each result. Default: `false`.
    pub use_boolean_and: bool,
    /// Boost factor for the search result score if a search word appears in the header.
    /// Default: `2`.
    pub boost_title: u8,
    /// Boost factor for the search result score if a search word appears in the hierarchy.
    /// The hierarchy contains all titles of the parent documents and all parent headings.
    /// Default: `1`.
    pub boost_hierarchy: u8,
    /// Boost factor for the search result score if a search word appears in the text.
    /// Default: `1`.
    pub boost_paragraph: u8,
    /// True if the searchword `micro` should match `microwave`. Default: `true`.
    pub expand: bool,
    /// Documents are split into smaller parts, seperated by headings. This defines, until which
    /// level of heading documents should be split. Default: `3`. (`### This is a level 3 heading`)
    pub heading_split_level: u8,
    /// Copy JavaScript files for the search functionality to the output directory?
    /// Default: `true`.
    pub copy_js: bool,
}
impl Default for Search {
    /// Defaults for the search feature.
    // Please update the documentation of `Search` when changing values!
    fn default() -> Search {
        Search {
            // feature toggles
            enable: true,
            expand: true,
            copy_js: true,
            use_boolean_and: false,
            // result sizing
            limit_results: 30,
            teaser_word_count: 30,
            heading_split_level: 3,
            // scoring boosts
            boost_title: 2,
            boost_hierarchy: 1,
            boost_paragraph: 1,
        }
    }
}
/// Allows you to "update" any arbitrary field in a struct by round-tripping via
/// a `toml::Value`.
///
/// This is definitely not the most performant way to do things, which means you
/// should probably keep it away from tight loops...
trait Updateable<'de>: Serialize + Deserialize<'de> {
    /// Serialize `self` to TOML, overwrite `key` with `value`, and
    /// deserialize the whole thing back into `self`.
    ///
    /// Best-effort by design: if serializing `value` or deserializing the
    /// patched document fails, `self` is left unchanged and no error is
    /// reported.
    fn update_value<S: Serialize>(&mut self, key: &str, value: S) {
        // `self` is Serialize, so this conversion cannot fail
        let mut raw = Value::try_from(&self).expect("unreachable");
        if let Ok(value) = Value::try_from(value) {
            let _ = raw.insert(key, value);
        } else {
            return;
        }
        if let Ok(updated) = raw.try_into() {
            *self = updated;
        }
    }
}

// Blanket impl: every (de)serializable type gets `update_value` for free.
impl<'de, T> Updateable<'de> for T where T: Serialize + Deserialize<'de> {}
#[cfg(test)]
mod tests {
    use super::*;

    // A config fixture exercising every table the loader understands:
    // [book], [build], [output.html] (incl. playpen) and [preprocessor.*].
    const COMPLEX_CONFIG: &str = r#"
[book]
title = "Some Book"
authors = ["Michael-F-Bryan <michaelfbryan@gmail.com>"]
description = "A completely useless book"
multilingual = true
src = "source"
language = "ja"
[build]
build-dir = "outputs"
create-missing = false
use-default-preprocessors = true
[output.html]
theme = "./themedir"
default-theme = "rust"
curly-quotes = true
google-analytics = "123456"
additional-css = ["./foo/bar/baz.css"]
git-repository-url = "https://foo.com/"
git-repository-icon = "fa-code-fork"
[output.html.playpen]
editable = true
editor = "ace"
[preprocessor.first]
[preprocessor.second]
"#;

    // End-to-end check that every recognized table deserializes into the
    // matching strongly-typed config struct.
    #[test]
    fn load_a_complex_config_file() {
        let src = COMPLEX_CONFIG;
        let book_should_be = BookConfig {
            title: Some(String::from("Some Book")),
            authors: vec![String::from("Michael-F-Bryan <michaelfbryan@gmail.com>")],
            description: Some(String::from("A completely useless book")),
            multilingual: true,
            src: PathBuf::from("source"),
            language: Some(String::from("ja")),
        };
        let build_should_be = BuildConfig {
            build_dir: PathBuf::from("outputs"),
            create_missing: false,
            use_default_preprocessors: true,
        };
        let playpen_should_be = Playpen {
            editable: true,
            copyable: true,
            copy_js: true,
            line_numbers: false,
        };
        let html_should_be = HtmlConfig {
            curly_quotes: true,
            google_analytics: Some(String::from("123456")),
            additional_css: vec![PathBuf::from("./foo/bar/baz.css")],
            theme: Some(PathBuf::from("./themedir")),
            default_theme: Some(String::from("rust")),
            playpen: playpen_should_be,
            git_repository_url: Some(String::from("https://foo.com/")),
            git_repository_icon: Some(String::from("fa-code-fork")),
            ..Default::default()
        };
        let got = Config::from_str(src).unwrap();
        assert_eq!(got.book, book_should_be);
        assert_eq!(got.build, build_should_be);
        assert_eq!(got.html_config().unwrap(), html_should_be);
    }

    // Unknown [output.*] tables must still be reachable through the generic
    // deserialization API, both as a whole and by dotted sub-key.
    #[test]
    fn load_arbitrary_output_type() {
        #[derive(Debug, Deserialize, PartialEq)]
        struct RandomOutput {
            foo: u32,
            bar: String,
            baz: Vec<bool>,
        }
        let src = r#"
[output.random]
foo = 5
bar = "Hello World"
baz = [true, true, false]
"#;
        let should_be = RandomOutput {
            foo: 5,
            bar: String::from("Hello World"),
            baz: vec![true, true, false],
        };
        let cfg = Config::from_str(src).unwrap();
        let got: RandomOutput = cfg.get_deserialized_opt("output.random").unwrap().unwrap();
        assert_eq!(got, should_be);
        let got_baz: Vec<bool> = cfg
            .get_deserialized_opt("output.random.baz")
            .unwrap()
            .unwrap();
        let baz_should_be = vec![true, true, false];
        assert_eq!(got_baz, baz_should_be);
    }

    #[test]
    fn mutate_some_stuff() {
        // really this is just a sanity check to make sure the borrow checker
        // is happy...
        let src = COMPLEX_CONFIG;
        let mut config = Config::from_str(src).unwrap();
        let key = "output.html.playpen.editable";
        assert_eq!(config.get(key).unwrap(), &Value::Boolean(true));
        *config.get_mut(key).unwrap() = Value::Boolean(false);
        assert_eq!(config.get(key).unwrap(), &Value::Boolean(false));
    }

    /// The config file format has slightly changed (metadata stuff is now under
    /// the `book` table instead of being at the top level) so we're adding a
    /// **temporary** compatibility check. You should be able to still load the
    /// old format, emitting a warning.
    #[test]
    fn can_still_load_the_previous_format() {
        let src = r#"
title = "mdBook Documentation"
description = "Create book from markdown files. Like Gitbook but implemented in Rust"
authors = ["Mathieu David"]
source = "./source"
[output.html]
destination = "my-book" # the output files will be generated in `root/my-book` instead of `root/book`
theme = "my-theme"
curly-quotes = true
google-analytics = "123456"
additional-css = ["custom.css", "custom2.css"]
additional-js = ["custom.js"]
"#;
        let book_should_be = BookConfig {
            title: Some(String::from("mdBook Documentation")),
            description: Some(String::from(
                "Create book from markdown files. Like Gitbook but implemented in Rust",
            )),
            authors: vec![String::from("Mathieu David")],
            src: PathBuf::from("./source"),
            ..Default::default()
        };
        let build_should_be = BuildConfig {
            // legacy `output.html.destination` maps onto `build.build-dir`
            build_dir: PathBuf::from("my-book"),
            create_missing: true,
            use_default_preprocessors: true,
        };
        let html_should_be = HtmlConfig {
            theme: Some(PathBuf::from("my-theme")),
            curly_quotes: true,
            google_analytics: Some(String::from("123456")),
            additional_css: vec![PathBuf::from("custom.css"), PathBuf::from("custom2.css")],
            additional_js: vec![PathBuf::from("custom.js")],
            ..Default::default()
        };
        let got = Config::from_str(src).unwrap();
        assert_eq!(got.book, book_should_be);
        assert_eq!(got.build, build_should_be);
        assert_eq!(got.html_config().unwrap(), html_should_be);
    }

    // set() followed by get_deserialized_opt() must round-trip a value
    // through an arbitrary dotted key.
    #[test]
    fn set_a_config_item() {
        let mut cfg = Config::default();
        let key = "foo.bar.baz";
        let value = "Something Interesting";
        assert!(cfg.get(key).is_none());
        cfg.set(key, value).unwrap();
        let got: String = cfg.get_deserialized_opt(key).unwrap().unwrap();
        assert_eq!(got, value);
    }

    // Env var name mangling: `MDBOOK_` prefix required, `__` -> `.`,
    // single `_` -> `-`.
    #[test]
    fn parse_env_vars() {
        let inputs = vec![
            ("FOO", None),
            ("MDBOOK_foo", Some("foo")),
            ("MDBOOK_FOO__bar__baz", Some("foo.bar.baz")),
            ("MDBOOK_FOO_bar__baz", Some("foo-bar.baz")),
        ];
        for (src, should_be) in inputs {
            let got = parse_env(src);
            let should_be = should_be.map(ToString::to_string);
            assert_eq!(got, should_be);
        }
    }

    // Inverse of `parse_env`: turn a dotted config key into its env var name.
    fn encode_env_var(key: &str) -> String {
        format!(
            "MDBOOK_{}",
            key.to_uppercase().replace('.', "__").replace("-", "_")
        )
    }

    // NOTE: the following tests mutate process-global environment variables.
    #[test]
    fn update_config_using_env_var() {
        let mut cfg = Config::default();
        let key = "foo.bar";
        let value = "baz";
        assert!(cfg.get(key).is_none());
        let encoded_key = encode_env_var(key);
        env::set_var(encoded_key, value);
        cfg.update_from_env();
        assert_eq!(
            cfg.get_deserialized_opt::<String, _>(key).unwrap().unwrap(),
            value
        );
    }

    // Env var values that parse as JSON become structured config values.
    #[test]
    #[allow(clippy::approx_constant)]
    fn update_config_using_env_var_and_complex_value() {
        let mut cfg = Config::default();
        let key = "foo-bar.baz";
        let value = json!({"array": [1, 2, 3], "number": 3.14});
        let value_str = serde_json::to_string(&value).unwrap();
        assert!(cfg.get(key).is_none());
        let encoded_key = encode_env_var(key);
        env::set_var(encoded_key, value_str);
        cfg.update_from_env();
        assert_eq!(
            cfg.get_deserialized_opt::<serde_json::Value, _>(key)
                .unwrap()
                .unwrap(),
            value
        );
    }

    // Strongly-typed fields (book.title) are also reachable via env vars.
    #[test]
    fn update_book_title_via_env() {
        let mut cfg = Config::default();
        let should_be = "Something else".to_string();
        assert_ne!(cfg.book.title, Some(should_be.clone()));
        env::set_var("MDBOOK_BOOK__TITLE", &should_be);
        cfg.update_from_env();
        assert_eq!(cfg.book.title, Some(should_be));
    }
}
|
/// Timeout for D-Bus calls.
pub const DBUS_TIMEOUT: i32 = 20000; // milliseconds
/// Size of a disk sector, in bytes.
pub const SECTOR_SIZE: u64 = 512;

// D-Bus service, object-path and interface identifiers for the stratis1 API.
pub const STRATIS_VERSION: &'static str = "1";
pub const MANAGER_NAME: &'static str = "/Manager";
pub const STRATIS_BASE_PATH: &'static str = "/org/storage/stratis1";
pub const STRATIS_BASE_SERVICE: &'static str = "org.storage.stratis1";
pub const STRATIS_BASE_MANAGER: &'static str = "/org/storage/stratis1/Manager";
pub const STRATIS_MANAGER_INTERFACE: &'static str = "org.storage.stratis1.Manager";
pub const STRATIS_POOL_BASE_INTERFACE: &'static str = "org.storage.stratis1.pool";
pub const STRATIS_VOLUME_BASE_INTERFACE: &'static str = "org.storage.stratis1.volume";
pub const STRATIS_DEV_BASE_INTERFACE: &'static str = "org.storage.stratis1.dev";
pub const STRATIS_CACHE_BASE_INTERFACE: &'static str = "org.storage.stratis1.cache";
// NOTE(review): unlike the paths above this one lacks the "1" suffix
// ("/org/storage/stratis/pool") — confirm whether that is intentional.
pub const STRATIS_POOL_BASE_PATH: &'static str = "/org/storage/stratis/pool";

// Method names exposed on the Manager D-Bus interface.
pub const LIST_POOLS: &'static str = "ListPools";
pub const CREATE_POOL: &'static str = "CreatePool";
pub const DESTROY_POOL: &'static str = "DestroyPool";
pub const GET_POOL_OBJECT_PATH: &'static str = "GetPoolObjectPath";
pub const GET_VOLUME_OBJECT_PATH: &'static str = "GetVolumeObjectPath";
pub const GET_DEV_OBJECT_PATH: &'static str = "GetDevObjectPath";
pub const GET_CACHE_OBJECT_PATH: &'static str = "GetCacheObjectPath";
pub const GET_ERROR_CODES: &'static str = "GetErrorCodes";
pub const GET_RAID_LEVELS: &'static str = "GetRaidLevels";
pub const GET_DEV_TYPES: &'static str = "GetDevTypes";
/// Error codes used by the stratis D-Bus API.
///
/// Variant names mirror a C-style enum, hence the SCREAMING_SNAKE_CASE.
pub enum StratisErrorEnum {
    STRATIS_OK,
    STRATIS_ERROR,
    STRATIS_NULL,
    STRATIS_MALLOC,
    STRATIS_NOTFOUND,
    STRATIS_POOL_NOTFOUND,
    STRATIS_VOLUME_NOTFOUND,
    STRATIS_DEV_NOTFOUND,
    STRATIS_CACHE_NOTFOUND,
    STRATIS_BAD_PARAM,
    STRATIS_ALREADY_EXISTS,
    STRATIS_NULL_NAME,
    STRATIS_NO_POOLS,
    STRATIS_LIST_FAILURE,
    /// Sentinel marking the number of error codes; not a real error.
    STRATIS_ERROR_MAX,
}
/// Redundancy levels understood by the stratis D-Bus API.
pub enum StratisRaidType {
    /// Unknown or unspecified raid type.
    STRATIS_RAID_TYPE_UNKNOWN,
    /** Single */
    STRATIS_RAID_TYPE_SINGLE,
    /** Mirror between two disks. For 4 disks or more, they are RAID10.*/
    STRATIS_RAID_TYPE_RAID1,
    /** Block-level striping with distributed parity */
    STRATIS_RAID_TYPE_RAID5,
    /** Block-level striping with two distributed parities, aka, RAID-DP */
    STRATIS_RAID_TYPE_RAID6,
    /// Sentinel marking the number of raid types; not a real type.
    STRATIS_RAID_TYPE_MAX,
}
Fix spacing in consts.rs
Remove double spaces before some consts
Remove tabs in enums
Newline at end of file.
Signed-off-by: Andy Grover <b7d524d2f5cc5aebadb6b92b08d3ab26911cde33@redhat.com>
/// Timeout for D-Bus calls.
pub const DBUS_TIMEOUT: i32 = 20000; // milliseconds
/// Size of a disk sector, in bytes.
pub const SECTOR_SIZE: u64 = 512;

// D-Bus service, object-path and interface identifiers for the stratis1 API.
pub const STRATIS_VERSION: &'static str = "1";
pub const MANAGER_NAME: &'static str = "/Manager";
pub const STRATIS_BASE_PATH: &'static str = "/org/storage/stratis1";
pub const STRATIS_BASE_SERVICE: &'static str = "org.storage.stratis1";
pub const STRATIS_BASE_MANAGER: &'static str = "/org/storage/stratis1/Manager";
pub const STRATIS_MANAGER_INTERFACE: &'static str = "org.storage.stratis1.Manager";
pub const STRATIS_POOL_BASE_INTERFACE: &'static str = "org.storage.stratis1.pool";
pub const STRATIS_VOLUME_BASE_INTERFACE: &'static str = "org.storage.stratis1.volume";
pub const STRATIS_DEV_BASE_INTERFACE: &'static str = "org.storage.stratis1.dev";
pub const STRATIS_CACHE_BASE_INTERFACE: &'static str = "org.storage.stratis1.cache";
// NOTE(review): unlike the paths above this one lacks the "1" suffix
// ("/org/storage/stratis/pool") — confirm whether that is intentional.
pub const STRATIS_POOL_BASE_PATH: &'static str = "/org/storage/stratis/pool";

// Method names exposed on the Manager D-Bus interface.
pub const LIST_POOLS: &'static str = "ListPools";
pub const CREATE_POOL: &'static str = "CreatePool";
pub const DESTROY_POOL: &'static str = "DestroyPool";
pub const GET_POOL_OBJECT_PATH: &'static str = "GetPoolObjectPath";
pub const GET_VOLUME_OBJECT_PATH: &'static str = "GetVolumeObjectPath";
pub const GET_DEV_OBJECT_PATH: &'static str = "GetDevObjectPath";
pub const GET_CACHE_OBJECT_PATH: &'static str = "GetCacheObjectPath";
pub const GET_ERROR_CODES: &'static str = "GetErrorCodes";
pub const GET_RAID_LEVELS: &'static str = "GetRaidLevels";
pub const GET_DEV_TYPES: &'static str = "GetDevTypes";
/// Error codes used by the stratis D-Bus API.
///
/// Variant names mirror a C-style enum, hence the SCREAMING_SNAKE_CASE.
pub enum StratisErrorEnum {
    STRATIS_OK,
    STRATIS_ERROR,
    STRATIS_NULL,
    STRATIS_MALLOC,
    STRATIS_NOTFOUND,
    STRATIS_POOL_NOTFOUND,
    STRATIS_VOLUME_NOTFOUND,
    STRATIS_DEV_NOTFOUND,
    STRATIS_CACHE_NOTFOUND,
    STRATIS_BAD_PARAM,
    STRATIS_ALREADY_EXISTS,
    STRATIS_NULL_NAME,
    STRATIS_NO_POOLS,
    STRATIS_LIST_FAILURE,
    /// Sentinel marking the number of error codes; not a real error.
    STRATIS_ERROR_MAX,
}
/// Redundancy levels understood by the stratis D-Bus API.
pub enum StratisRaidType {
    /// Unknown or unspecified raid type.
    STRATIS_RAID_TYPE_UNKNOWN,
    /** Single */
    STRATIS_RAID_TYPE_SINGLE,
    /** Mirror between two disks. For 4 disks or more, they are RAID10.*/
    STRATIS_RAID_TYPE_RAID1,
    /** Block-level striping with distributed parity */
    STRATIS_RAID_TYPE_RAID5,
    /** Block-level striping with two distributed parities, aka, RAID-DP */
    STRATIS_RAID_TYPE_RAID6,
    /// Sentinel marking the number of raid types; not a real type.
    STRATIS_RAID_TYPE_MAX,
}
|
use rustc::lint::Context;
use rustc::middle::const_eval::lookup_const_by_id;
use rustc::middle::def::PathResolution;
use rustc::middle::def::Def::*;
use syntax::ast::*;
use syntax::ptr::P;
use std::cmp::PartialOrd;
use std::cmp::Ordering::{self, Greater, Less, Equal};
use std::rc::Rc;
use std::ops::Deref;
use self::ConstantVariant::*;
use self::FloatWidth::*;
/// The width of a float literal: explicitly 32- or 64-bit, or unsuffixed
/// (`FwAny`), which unifies with either width during comparisons.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum FloatWidth {
    Fw32,
    Fw64,
    FwAny
}
impl From<FloatTy> for FloatWidth {
    // Map the AST float type onto its width; the match covers both `FloatTy`
    // variants, so suffixed literals always get a definite width.
    fn from(ty: FloatTy) -> FloatWidth {
        match ty {
            TyF32 => Fw32,
            TyF64 => Fw64,
        }
    }
}
/// A folded constant value plus a flag recording whether a path lookup
/// (e.g. resolving a named `const` item) was needed to compute it.
#[derive(PartialEq, Eq, Debug, Clone)]
pub struct Constant {
    pub constant: ConstantVariant,
    // true iff a const item had to be resolved to produce this value
    pub needed_resolution: bool
}
impl Constant {
    /// Wrap a variant that was computed without resolving any paths.
    pub fn new(variant: ConstantVariant) -> Constant {
        Constant { constant: variant, needed_resolution: false }
    }

    /// Wrap a variant that required resolving a path (see `fetch_path`).
    pub fn new_resolved(variant: ConstantVariant) -> Constant {
        Constant { constant: variant, needed_resolution: true }
    }

    // convert this constant to a f64, if possible
    pub fn as_float(&self) -> Option<f64> {
        match &self.constant {
            &ConstantByte(b) => Some(b as f64),
            // float literals keep their textual form; parse on demand
            &ConstantFloat(ref s, _) => s.parse().ok(),
            // ints store a magnitude; the sign lives in the literal type
            &ConstantInt(i, ty) => Some(if is_negative(ty) {
                -(i as f64) } else { i as f64 }),
            _ => None
        }
    }
}
impl PartialOrd for Constant {
    // Delegates to the wrapped variant; `needed_resolution` is deliberately
    // ignored when comparing constants.
    fn partial_cmp(&self, other: &Constant) -> Option<Ordering> {
        self.constant.partial_cmp(&other.constant)
    }
}
/// a Lit_-like enum to fold constant `Expr`s into
#[derive(Eq, Debug, Clone)]
pub enum ConstantVariant {
    /// a String "abc"
    ConstantStr(String, StrStyle),
    /// a Binary String b"abc"
    ConstantBinary(Rc<Vec<u8>>),
    /// a single byte b'a'
    ConstantByte(u8),
    /// a single char 'a'
    ConstantChar(char),
    /// an integer; the u64 is the magnitude, the sign is carried by the
    /// `LitIntType` (see `is_negative`)
    ConstantInt(u64, LitIntType),
    /// a float with given type; kept as its source text so no precision is
    /// lost before it is actually needed
    ConstantFloat(String, FloatWidth),
    /// true or false
    ConstantBool(bool),
    /// an array of constants
    ConstantVec(Vec<Constant>),
    /// also an array, but with only one constant, repeated N times
    ConstantRepeat(Box<ConstantVariant>, usize),
    /// a tuple of constants
    ConstantTuple(Vec<Constant>),
}
impl ConstantVariant {
    /// convert to u64 if possible
    ///
    /// # panics
    ///
    /// if the constant could not be converted to u64 losslessly
    fn as_u64(&self) -> u64 {
        if let &ConstantInt(val, _) = self {
            val // TODO we may want to check the sign if any
        } else {
            // Bug fix: the `{:?}` placeholder previously had no argument, so
            // the panic message printed a literal "{:?}" instead of the
            // offending constant (the enum derives `Debug`, so `self` works).
            panic!("Could not convert a {:?} to u64", self);
        }
    }
}
impl PartialEq for ConstantVariant {
    // Manual impl (instead of derive) because floats compare by parsed value
    // and ints compare by signed value, not by raw fields.
    fn eq(&self, other: &ConstantVariant) -> bool {
        match (self, other) {
            (&ConstantStr(ref ls, ref lsty), &ConstantStr(ref rs, ref rsty)) =>
                ls == rs && lsty == rsty,
            (&ConstantBinary(ref l),&ConstantBinary(ref r)) => l == r,
            (&ConstantByte(l), &ConstantByte(r)) => l == r,
            (&ConstantChar(l), &ConstantChar(r)) => l == r,
            // equal magnitude and equal *effective* sign: a zero is equal
            // regardless of its type's sign (+0 == -0)
            (&ConstantInt(lv, lty), &ConstantInt(rv, rty)) => lv == rv &&
                (is_negative(lty) & (lv != 0)) == (is_negative(rty) & (rv != 0)),
            // floats are equal when their widths unify and both texts parse
            // to equal f64 values.
            // NOTE(review): "NaN" parses to f64::NAN which is never equal to
            // itself, yet the enum derives `Eq` — confirm reflexivity is
            // acceptable here.
            (&ConstantFloat(ref ls, lw), &ConstantFloat(ref rs, rw)) =>
                if match (lw, rw) {
                    (FwAny, _) | (_, FwAny) | (Fw32, Fw32) | (Fw64, Fw64) => true,
                    _ => false,
                } {
                    match (ls.parse::<f64>(), rs.parse::<f64>()) {
                        (Ok(l), Ok(r)) => l.eq(&r),
                        _ => false,
                    }
                } else { false },
            (&ConstantBool(l), &ConstantBool(r)) => l == r,
            (&ConstantVec(ref l), &ConstantVec(ref r)) => l == r,
            (&ConstantRepeat(ref lv, ref ls), &ConstantRepeat(ref rv, ref rs)) =>
                ls == rs && lv == rv,
            (&ConstantTuple(ref l), &ConstantTuple(ref r)) => l == r,
            _ => false, //TODO: Are there inter-type equalities?
        }
    }
}
impl PartialOrd for ConstantVariant {
    /// Orders two constants of comparable types; `None` for cross-type
    /// comparisons, non-unifiable float widths, or unparseable floats.
    fn partial_cmp(&self, other: &ConstantVariant) -> Option<Ordering> {
        match (self, other) {
            // strings only compare within the same style (cooked vs. raw)
            (&ConstantStr(ref ls, ref lsty), &ConstantStr(ref rs, ref rsty)) =>
                if lsty != rsty { None } else { Some(ls.cmp(rs)) },
            (&ConstantByte(ref l), &ConstantByte(ref r)) => Some(l.cmp(r)),
            (&ConstantChar(ref l), &ConstantChar(ref r)) => Some(l.cmp(r)),
            // ints are stored as magnitude + sign-of-type; zero counts as
            // non-negative whatever its type says.
            // Bug fix: all four arms were inverted, yielding the reverse
            // ordering (e.g. 1 compared Greater than 2, and a negative
            // compared Greater than a positive).
            (&ConstantInt(ref lv, lty), &ConstantInt(ref rv, rty)) =>
                Some(match (is_negative(lty) && *lv != 0,
                            is_negative(rty) && *rv != 0) {
                    // both negative: larger magnitude means smaller value
                    (true, true) => rv.cmp(lv),
                    // both non-negative: plain magnitude comparison
                    (false, false) => lv.cmp(rv),
                    // negative < non-negative
                    (true, false) => Less,
                    (false, true) => Greater,
                }),
            (&ConstantFloat(ref ls, lw), &ConstantFloat(ref rs, rw)) =>
                if match (lw, rw) {
                    (FwAny, _) | (_, FwAny) | (Fw32, Fw32) | (Fw64, Fw64) => true,
                    _ => false,
                } {
                    match (ls.parse::<f64>(), rs.parse::<f64>()) {
                        (Ok(ref l), Ok(ref r)) => l.partial_cmp(r),
                        _ => None,
                    }
                } else { None },
            (&ConstantBool(ref l), &ConstantBool(ref r)) => Some(l.cmp(r)),
            (&ConstantVec(ref l), &ConstantVec(ref r)) => l.partial_cmp(&r),
            (&ConstantRepeat(ref lv, ref ls), &ConstantRepeat(ref rv, ref rs)) =>
                match lv.partial_cmp(rv) {
                    Some(Equal) => Some(ls.cmp(rs)),
                    x => x,
                },
            (&ConstantTuple(ref l), &ConstantTuple(ref r)) => l.partial_cmp(r),
            _ => None, //TODO: Are there any useful inter-type orderings?
        }
    }
}
/// simple constant folding: Insert an expression, get a constant or none.
pub fn constant(cx: &Context, e: &Expr) -> Option<Constant> {
    match &e.node {
        // parentheses are transparent
        &ExprParen(ref inner) => constant(cx, inner),
        // a path may name a `const` item we can resolve
        &ExprPath(_, _) => fetch_path(cx, e),
        &ExprBlock(ref block) => constant_block(cx, block),
        &ExprIf(ref cond, ref then, ref otherwise) =>
            constant_if(cx, &*cond, &*then, &*otherwise),
        &ExprLit(ref lit) => Some(lit_to_constant(&lit.node)),
        &ExprVec(ref vec) => constant_vec(cx, &vec[..]),
        &ExprTup(ref tup) => constant_tup(cx, &tup[..]),
        // `[val; n]`: both the element and the count must fold
        &ExprRepeat(ref value, ref number) =>
            constant_binop_apply(cx, value, number,|v, n|
                Some(ConstantRepeat(Box::new(v), n.as_u64() as usize))),
        &ExprUnary(op, ref operand) => constant(cx, operand).and_then(
            |o| match op {
                // `!` is only folded for bools here
                UnNot =>
                    if let ConstantBool(b) = o.constant {
                        Some(Constant{
                            needed_resolution: o.needed_resolution,
                            constant: ConstantBool(!b),
                        })
                    } else { None },
                UnNeg => constant_negate(o),
                // box/deref of a constant is the constant itself
                UnUniq | UnDeref => Some(o),
            }),
        &ExprBinary(op, ref left, ref right) =>
            constant_binop(cx, op, left, right),
        //TODO: add other expressions
        _ => None,
    }
}
/// Translate an AST literal into an unresolved `Constant`.
fn lit_to_constant(lit: &Lit_) -> Constant {
    // Every literal folds without any path resolution, so wrap once at the end.
    Constant::new(match *lit {
        LitStr(ref is, style) => ConstantStr(is.to_string(), style),
        LitBinary(ref blob) => ConstantBinary(blob.clone()),
        LitByte(b) => ConstantByte(b),
        LitChar(c) => ConstantChar(c),
        LitInt(value, ty) => ConstantInt(value, ty),
        LitFloat(ref is, ty) => ConstantFloat(is.to_string(), ty.into()),
        LitFloatUnsuffixed(ref is) => ConstantFloat(is.to_string(), FwAny),
        LitBool(b) => ConstantBool(b),
    })
}
/// create `Some(ConstantVec(..))` of all constants, unless there is any
/// non-constant part
fn constant_vec<E: Deref<Target=Expr> + Sized>(cx: &Context, vec: &[E]) -> Option<Constant> {
    let mut parts = Vec::new();
    let mut resolved = false;
    for part in vec {
        if let Some(p) = constant(cx, part) {
            // the array needs resolution if any element did
            resolved |= p.needed_resolution;
            parts.push(p);
        } else {
            return None;
        }
    }
    Some(Constant {
        constant: ConstantVec(parts),
        needed_resolution: resolved,
    })
}
/// Fold a tuple expression: all elements must fold, otherwise `None`.
fn constant_tup<E: Deref<Target=Expr> + Sized>(cx: &Context, tup: &[E]) -> Option<Constant> {
    let mut parts = Vec::new();
    let mut resolved = false;
    for part in tup {
        if let Some(p) = constant(cx, part) {
            // the tuple needs resolution if any element did
            resolved |= p.needed_resolution;
            parts.push(p);
        } else {
            return None;
        }
    }
    Some(Constant {
        constant: ConstantTuple(parts),
        needed_resolution: resolved,
    })
}
/// lookup a possibly constant expression from a ExprPath
fn fetch_path(cx: &Context, e: &Expr) -> Option<Constant> {
    // Only paths resolving to a `const` item are folded; the result is marked
    // `new_resolved` so lints can tell it came from a lookup rather than from
    // a literal at the use site.
    if let Some(&PathResolution { base_def: DefConst(id), ..}) =
            cx.tcx.def_map.borrow().get(&e.id) {
        lookup_const_by_id(cx.tcx, id, None).and_then(
            |l| constant(cx, l).map(|c| Constant::new_resolved(c.constant)))
    } else { None }
}
/// A block can only yield a constant if it only has one constant expression
fn constant_block(cx: &Context, block: &Block) -> Option<Constant> {
    match block.expr {
        // a tail expression with no preceding statements may fold
        Some(ref e) if block.stmts.is_empty() => constant(cx, &*e),
        _ => None,
    }
}
/// Fold an `if`: the condition must fold to a bool; only the branch actually
/// taken is folded (the untaken branch is never inspected).
fn constant_if(cx: &Context, cond: &Expr, then: &Block, otherwise:
        &Option<P<Expr>>) -> Option<Constant> {
    if let Some(Constant{ constant: ConstantBool(b), needed_resolution: res }) =
            constant(cx, cond) {
        if b {
            constant_block(cx, then)
        } else {
            otherwise.as_ref().and_then(|expr| constant(cx, &*expr))
        }.map(|part|
            Constant {
                constant: part.constant,
                // resolution in either the condition or the branch taints
                // the result
                needed_resolution: res || part.needed_resolution,
            })
    } else { None }
}
/// Fold unary minus: for ints the magnitude is untouched and only the sign in
/// the literal type flips; for floats a `-` is prepended/stripped textually.
fn constant_negate(o: Constant) -> Option<Constant> {
    Some(Constant{
        needed_resolution: o.needed_resolution,
        constant: match o.constant {
            ConstantInt(value, ty) =>
                ConstantInt(value, match ty {
                    SignedIntLit(ity, sign) =>
                        SignedIntLit(ity, neg_sign(sign)),
                    UnsuffixedIntLit(sign) => UnsuffixedIntLit(neg_sign(sign)),
                    // unsigned literals cannot be negated
                    _ => { return None; },
                }),
            ConstantFloat(is, ty) =>
                ConstantFloat(neg_float_str(is), ty),
            // negation of other variants is not folded
            _ => { return None; },
        }
    })
}
/// Flip a literal's sign marker.
fn neg_sign(s: Sign) -> Sign {
    match s {
        Sign::Minus => Sign::Plus,
        Sign::Plus => Sign::Minus,
    }
}
/// Negate a float literal kept in textual form: `"1.5"` becomes `"-1.5"` and
/// `"-1.5"` becomes `"1.5"`.
fn neg_float_str(s: String) -> String {
    let already_negative = s.starts_with('-');
    if already_negative {
        // drop the leading minus
        s[1..].to_owned()
    } else {
        format!("-{}", s)
    }
}
/// is the given LitIntType negative?
///
/// Examples
///
/// ```
/// assert!(is_negative(UnsuffixedIntLit(Minus)));
/// ```
pub fn is_negative(ty: LitIntType) -> bool {
    match ty {
        SignedIntLit(_, sign) | UnsuffixedIntLit(sign) => sign == Minus,
        // unsigned literals are never negative
        UnsignedIntLit(_) => false,
    }
}
/// Unify the literal types of two int operands for a result of sign `s`.
///
/// Suffixed types must agree; an unsuffixed operand adopts the other side's
/// type; unsigned types are only kept for non-negative results.
fn unify_int_type(l: LitIntType, r: LitIntType, s: Sign) -> Option<LitIntType> {
    match (l, r) {
        (SignedIntLit(lty, _), SignedIntLit(rty, _)) => if lty == rty {
            Some(SignedIntLit(lty, s)) } else { None },
        (UnsignedIntLit(lty), UnsignedIntLit(rty)) =>
            if s == Plus && lty == rty {
                Some(UnsignedIntLit(lty))
            } else { None },
        (UnsuffixedIntLit(_), UnsuffixedIntLit(_)) => Some(UnsuffixedIntLit(s)),
        (SignedIntLit(lty, _), UnsuffixedIntLit(_)) => Some(SignedIntLit(lty, s)),
        // NOTE(review): the two unsigned/unsuffixed arms check the unsuffixed
        // operand's own sign rather than the requested result sign `s` —
        // confirm that is intended.
        (UnsignedIntLit(lty), UnsuffixedIntLit(rs)) => if rs == Plus {
            Some(UnsignedIntLit(lty)) } else { None },
        (UnsuffixedIntLit(_), SignedIntLit(rty, _)) => Some(SignedIntLit(rty, s)),
        (UnsuffixedIntLit(ls), UnsignedIntLit(rty)) => if ls == Plus {
            Some(UnsignedIntLit(rty)) } else { None },
        _ => None,
    }
}
/// Fold a binary expression when both operands fold and the operator is
/// supported; otherwise `None`.
fn constant_binop(cx: &Context, op: BinOp, left: &Expr, right: &Expr)
        -> Option<Constant> {
    match op.node {
        // addition: checked on bytes; on ints the literal types must unify
        BiAdd => constant_binop_apply(cx, left, right, |l, r|
            match (l, r) {
                (ConstantByte(l8), ConstantByte(r8)) =>
                    l8.checked_add(r8).map(ConstantByte),
                (ConstantInt(l64, lty), ConstantInt(r64, rty)) => {
                    let (ln, rn) = (is_negative(lty), is_negative(rty));
                    if ln == rn {
                        // same sign: add magnitudes, keep that sign
                        unify_int_type(lty, rty, if ln { Minus } else { Plus })
                            .and_then(|ty| l64.checked_add(r64).map(
                                |v| ConstantInt(v, ty)))
                    } else {
                        // mixed signs: reduces to subtracting magnitudes
                        if ln {
                            add_neg_int(r64, rty, l64, lty)
                        } else {
                            add_neg_int(l64, lty, r64, rty)
                        }
                    }
                },
                // TODO: float (would need bignum library?)
                _ => None
            }),
        BiSub => constant_binop_apply(cx, left, right, |l, r|
            match (l, r) {
                // bytes: only folded when no underflow can occur
                (ConstantByte(l8), ConstantByte(r8)) => if r8 > l8 {
                    None } else { Some(ConstantByte(l8 - r8)) },
                (ConstantInt(l64, lty), ConstantInt(r64, rty)) => {
                    let (ln, rn) = (is_negative(lty), is_negative(rty));
                    // same-sign cases are conservative: `sub_int` uses
                    // checked_sub on magnitudes, so a result crossing zero
                    // folds to None instead of a wrong value
                    match (ln, rn) {
                        (false, false) => sub_int(l64, lty, r64, rty, r64 > l64),
                        (true, true) => sub_int(l64, lty, r64, rty, l64 > r64),
                        // neg - pos / pos - neg: magnitudes add
                        (true, false) => unify_int_type(lty, rty, Minus)
                            .and_then(|ty| l64.checked_add(r64).map(
                                |v| ConstantInt(v, ty))),
                        (false, true) => unify_int_type(lty, rty, Plus)
                            .and_then(|ty| l64.checked_add(r64).map(
                                |v| ConstantInt(v, ty))),
                    }
                },
                _ => None,
            }),
        //BiMul,
        //BiDiv,
        //BiRem,
        // lazy boolean operators short-circuit on the left operand
        BiAnd => constant_short_circuit(cx, left, right, false),
        BiOr => constant_short_circuit(cx, left, right, true),
        BiBitXor => constant_bitop(cx, left, right, |x, y| x ^ y),
        BiBitAnd => constant_bitop(cx, left, right, |x, y| x & y),
        BiBitOr => constant_bitop(cx, left, right, |x, y| (x | y)),
        BiShl => constant_bitop(cx, left, right, |x, y| x << y),
        BiShr => constant_bitop(cx, left, right, |x, y| x >> y),
        BiEq => constant_binop_apply(cx, left, right,
            |l, r| Some(ConstantBool(l == r))),
        BiNe => constant_binop_apply(cx, left, right,
            |l, r| Some(ConstantBool(l != r))),
        // comparisons are (ordering, expected) pairs:
        // `<` is "is Less", `<=` is "is not Greater", etc.
        BiLt => constant_cmp(cx, left, right, Less, true),
        BiLe => constant_cmp(cx, left, right, Greater, false),
        BiGe => constant_cmp(cx, left, right, Less, false),
        BiGt => constant_cmp(cx, left, right, Greater, true),
        _ => None
    }
}
/// Fold a bitwise operator by applying `f` to the operands widened to u64.
///
/// Supports bools (as 0/1), bytes (result truncated back to u8) and ints
/// (whose literal types must unify as non-negative).
fn constant_bitop<F>(cx: &Context, left: &Expr, right: &Expr, f: F)
        -> Option<Constant> where F: Fn(u64, u64) -> u64 {
    constant_binop_apply(cx, left, right, |l, r| match (l, r) {
        (ConstantBool(l), ConstantBool(r)) =>
            Some(ConstantBool(f(l as u64, r as u64) != 0)),
        (ConstantByte(l8), ConstantByte(r8)) =>
            Some(ConstantByte(f(l8 as u64, r8 as u64) as u8)),
        (ConstantInt(l, lty), ConstantInt(r, rty)) =>
            unify_int_type(lty, rty, Plus).map(|ty| ConstantInt(f(l, r), ty)),
        _ => None
    })
}
/// Fold a comparison: the result is `b == (partial_cmp(l, r) == ordering)`,
/// so `<` passes (Less, true) while `>=` passes (Less, false), i.e. "not Less".
fn constant_cmp(cx: &Context, left: &Expr, right: &Expr, ordering: Ordering,
        b: bool) -> Option<Constant> {
    constant_binop_apply(cx, left, right, |l, r| l.partial_cmp(&r).map(|o|
        ConstantBool(b == (o == ordering))))
}
/// Add a non-negative magnitude `pos` and a negative magnitude `neg`:
/// effectively `pos - neg`, the result sign chosen by which is larger.
fn add_neg_int(pos: u64, pty: LitIntType, neg: u64, nty: LitIntType) ->
        Option<ConstantVariant> {
    if neg > pos {
        unify_int_type(nty, pty, Minus).map(|ty| ConstantInt(neg - pos, ty))
    } else {
        unify_int_type(nty, pty, Plus).map(|ty| ConstantInt(pos - neg, ty))
    }
}
/// Subtract the magnitudes of two same-signed ints; `neg` picks the result
/// sign. Conservative: `checked_sub` yields `None` (no fold) when `r > l`.
fn sub_int(l: u64, lty: LitIntType, r: u64, rty: LitIntType, neg: bool) ->
        Option<ConstantVariant> {
    unify_int_type(lty, rty, if neg { Minus } else { Plus }).and_then(
        |ty| l.checked_sub(r).map(|v| ConstantInt(v, ty)))
}
/// Fold both operands, then combine the variants with `op`; the result needs
/// resolution if either operand did.
fn constant_binop_apply<F>(cx: &Context, left: &Expr, right: &Expr, op: F)
        -> Option<Constant>
        where F: Fn(ConstantVariant, ConstantVariant) -> Option<ConstantVariant> {
    if let (Some(Constant { constant: lc, needed_resolution: ln }),
            Some(Constant { constant: rc, needed_resolution: rn })) =
            (constant(cx, left), constant(cx, right)) {
        op(lc, rc).map(|c|
            Constant {
                needed_resolution: ln || rn,
                constant: c,
            })
    } else { None }
}
/// Fold `&&` / `||`. `b` is the operator's short-circuit value (false for
/// `&&`, true for `||`): if the left side folds to `b`, the right side is
/// never inspected, mirroring the language's lazy evaluation.
fn constant_short_circuit(cx: &Context, left: &Expr, right: &Expr, b: bool) ->
        Option<Constant> {
    constant(cx, left).and_then(|left|
        if let &ConstantBool(lbool) = &left.constant {
            if lbool == b {
                Some(left)
            } else {
                // result is the right side's value, tainted by the left
                // side's resolution flag
                constant(cx, right).and_then(|right|
                    if let ConstantBool(_) = right.constant {
                        Some(Constant {
                            constant: right.constant,
                            needed_resolution: left.needed_resolution ||
                                               right.needed_resolution,
                        })
                    } else { None }
                )
            }
        } else { None }
    )
}
const eval: implement ! for integers
use rustc::lint::Context;
use rustc::middle::const_eval::lookup_const_by_id;
use rustc::middle::def::PathResolution;
use rustc::middle::def::Def::*;
use syntax::ast::*;
use syntax::ptr::P;
use std::cmp::PartialOrd;
use std::cmp::Ordering::{self, Greater, Less, Equal};
use std::rc::Rc;
use std::ops::Deref;
use self::ConstantVariant::*;
use self::FloatWidth::*;
/// The width of a float literal: explicitly 32- or 64-bit, or unsuffixed
/// (`FwAny`), which unifies with either width during comparisons.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum FloatWidth {
    Fw32,
    Fw64,
    FwAny
}
impl From<FloatTy> for FloatWidth {
    // Map the AST float type onto its width; the match covers both `FloatTy`
    // variants, so suffixed literals always get a definite width.
    fn from(ty: FloatTy) -> FloatWidth {
        match ty {
            TyF32 => Fw32,
            TyF64 => Fw64,
        }
    }
}
/// A folded constant value plus a flag recording whether a path lookup
/// (e.g. resolving a named `const` item) was needed to compute it.
#[derive(PartialEq, Eq, Debug, Clone)]
pub struct Constant {
    pub constant: ConstantVariant,
    // true iff a const item had to be resolved to produce this value
    pub needed_resolution: bool
}
impl Constant {
    /// Wrap a variant that was computed without resolving any paths.
    pub fn new(variant: ConstantVariant) -> Constant {
        Constant { constant: variant, needed_resolution: false }
    }

    /// Wrap a variant that required resolving a path (see `fetch_path`).
    pub fn new_resolved(variant: ConstantVariant) -> Constant {
        Constant { constant: variant, needed_resolution: true }
    }

    // convert this constant to a f64, if possible
    pub fn as_float(&self) -> Option<f64> {
        match &self.constant {
            &ConstantByte(b) => Some(b as f64),
            // float literals keep their textual form; parse on demand
            &ConstantFloat(ref s, _) => s.parse().ok(),
            // ints store a magnitude; the sign lives in the literal type
            &ConstantInt(i, ty) => Some(if is_negative(ty) {
                -(i as f64) } else { i as f64 }),
            _ => None
        }
    }
}
impl PartialOrd for Constant {
    // Delegates to the wrapped variant; `needed_resolution` is deliberately
    // ignored when comparing constants.
    fn partial_cmp(&self, other: &Constant) -> Option<Ordering> {
        self.constant.partial_cmp(&other.constant)
    }
}
/// a Lit_-like enum to fold constant `Expr`s into
#[derive(Eq, Debug, Clone)]
pub enum ConstantVariant {
    /// a String "abc"
    ConstantStr(String, StrStyle),
    /// a Binary String b"abc"
    ConstantBinary(Rc<Vec<u8>>),
    /// a single byte b'a'
    ConstantByte(u8),
    /// a single char 'a'
    ConstantChar(char),
    /// an integer; the u64 is the magnitude, the sign is carried by the
    /// `LitIntType` (see `is_negative`)
    ConstantInt(u64, LitIntType),
    /// a float with given type; kept as its source text so no precision is
    /// lost before it is actually needed
    ConstantFloat(String, FloatWidth),
    /// true or false
    ConstantBool(bool),
    /// an array of constants
    ConstantVec(Vec<Constant>),
    /// also an array, but with only one constant, repeated N times
    ConstantRepeat(Box<ConstantVariant>, usize),
    /// a tuple of constants
    ConstantTuple(Vec<Constant>),
}
impl ConstantVariant {
    /// convert to u64 if possible
    ///
    /// # panics
    ///
    /// if the constant could not be converted to u64 losslessly
    fn as_u64(&self) -> u64 {
        if let &ConstantInt(val, _) = self {
            val // TODO we may want to check the sign if any
        } else {
            // Bug fix: the `{:?}` placeholder previously had no argument, so
            // the panic message printed a literal "{:?}" instead of the
            // offending constant (the enum derives `Debug`, so `self` works).
            panic!("Could not convert a {:?} to u64", self);
        }
    }
}
impl PartialEq for ConstantVariant {
    // Manual impl (instead of derive) because floats compare by parsed value
    // and ints compare by signed value, not by raw fields.
    fn eq(&self, other: &ConstantVariant) -> bool {
        match (self, other) {
            (&ConstantStr(ref ls, ref lsty), &ConstantStr(ref rs, ref rsty)) =>
                ls == rs && lsty == rsty,
            (&ConstantBinary(ref l), &ConstantBinary(ref r)) => l == r,
            (&ConstantByte(l), &ConstantByte(r)) => l == r,
            (&ConstantChar(l), &ConstantChar(r)) => l == r,
            // equal magnitude and equal *effective* sign: a zero is equal
            // regardless of its type's sign (+0 == -0)
            (&ConstantInt(lv, lty), &ConstantInt(rv, rty)) => lv == rv &&
                (is_negative(lty) & (lv != 0)) == (is_negative(rty) & (rv != 0)),
            // floats are equal when their widths unify and both texts parse
            // to equal f64 values.
            // NOTE(review): "NaN" parses to f64::NAN which is never equal to
            // itself, yet the enum derives `Eq` — confirm reflexivity is
            // acceptable here.
            (&ConstantFloat(ref ls, lw), &ConstantFloat(ref rs, rw)) =>
                if match (lw, rw) {
                    (FwAny, _) | (_, FwAny) | (Fw32, Fw32) | (Fw64, Fw64) => true,
                    _ => false,
                } {
                    match (ls.parse::<f64>(), rs.parse::<f64>()) {
                        (Ok(l), Ok(r)) => l.eq(&r),
                        _ => false,
                    }
                } else { false },
            (&ConstantBool(l), &ConstantBool(r)) => l == r,
            (&ConstantVec(ref l), &ConstantVec(ref r)) => l == r,
            (&ConstantRepeat(ref lv, ref ls), &ConstantRepeat(ref rv, ref rs)) =>
                ls == rs && lv == rv,
            (&ConstantTuple(ref l), &ConstantTuple(ref r)) => l == r,
            _ => false, //TODO: Are there inter-type equalities?
        }
    }
}
impl PartialOrd for ConstantVariant {
fn partial_cmp(&self, other: &ConstantVariant) -> Option<Ordering> {
match (self, other) {
(&ConstantStr(ref ls, ref lsty), &ConstantStr(ref rs, ref rsty)) =>
if lsty != rsty { None } else { Some(ls.cmp(rs)) },
(&ConstantByte(ref l), &ConstantByte(ref r)) => Some(l.cmp(r)),
(&ConstantChar(ref l), &ConstantChar(ref r)) => Some(l.cmp(r)),
(&ConstantInt(ref lv, lty), &ConstantInt(ref rv, rty)) =>
Some(match (is_negative(lty) && *lv != 0,
is_negative(rty) && *rv != 0) {
(true, true) => lv.cmp(rv),
(false, false) => rv.cmp(lv),
(true, false) => Greater,
(false, true) => Less,
}),
(&ConstantFloat(ref ls, lw), &ConstantFloat(ref rs, rw)) =>
if match (lw, rw) {
(FwAny, _) | (_, FwAny) | (Fw32, Fw32) | (Fw64, Fw64) => true,
_ => false,
} {
match (ls.parse::<f64>(), rs.parse::<f64>()) {
(Ok(ref l), Ok(ref r)) => l.partial_cmp(r),
_ => None,
}
} else { None },
(&ConstantBool(ref l), &ConstantBool(ref r)) => Some(l.cmp(r)),
(&ConstantVec(ref l), &ConstantVec(ref r)) => l.partial_cmp(&r),
(&ConstantRepeat(ref lv, ref ls), &ConstantRepeat(ref rv, ref rs)) =>
match lv.partial_cmp(rv) {
Some(Equal) => Some(ls.cmp(rs)),
x => x,
},
(&ConstantTuple(ref l), &ConstantTuple(ref r)) => l.partial_cmp(r),
_ => None, //TODO: Are there any useful inter-type orderings?
}
}
}
/// simple constant folding: Insert an expression, get a constant or none.
pub fn constant(cx: &Context, e: &Expr) -> Option<Constant> {
    match &e.node {
        // parentheses are transparent
        &ExprParen(ref inner) => constant(cx, inner),
        // a path may name a `const` item
        &ExprPath(_, _) => fetch_path(cx, e),
        &ExprBlock(ref block) => constant_block(cx, block),
        &ExprIf(ref cond, ref then, ref otherwise) =>
            constant_if(cx, &*cond, &*then, &*otherwise),
        &ExprLit(ref lit) => Some(lit_to_constant(&lit.node)),
        &ExprVec(ref vec) => constant_vec(cx, &vec[..]),
        &ExprTup(ref tup) => constant_tup(cx, &tup[..]),
        // `[value; number]`: both the value and the repeat count must fold
        &ExprRepeat(ref value, ref number) =>
            constant_binop_apply(cx, value, number, |v, n|
                Some(ConstantRepeat(Box::new(v), n.as_u64() as usize))),
        &ExprUnary(op, ref operand) => constant(cx, operand).and_then(
            |o| match op {
                UnNot => constant_not(o),
                UnNeg => constant_negate(o),
                // boxing / dereferencing do not change the folded value
                UnUniq | UnDeref => Some(o),
            }),
        &ExprBinary(op, ref left, ref right) =>
            constant_binop(cx, op, left, right),
        //TODO: add other expressions
        _ => None,
    }
}
/// convert a literal AST node into our `Constant` representation
fn lit_to_constant(lit: &Lit_) -> Constant {
    match lit {
        &LitStr(ref is, style) =>
            Constant::new(ConstantStr(is.to_string(), style)),
        &LitBinary(ref blob) => Constant::new(ConstantBinary(blob.clone())),
        &LitByte(b) => Constant::new(ConstantByte(b)),
        &LitChar(c) => Constant::new(ConstantChar(c)),
        &LitInt(value, ty) => Constant::new(ConstantInt(value, ty)),
        // floats are kept as their source string to avoid precision loss
        &LitFloat(ref is, ty) => {
            Constant::new(ConstantFloat(is.to_string(), ty.into()))
        },
        // an unsuffixed float gets the "any width" marker
        &LitFloatUnsuffixed(ref is) => {
            Constant::new(ConstantFloat(is.to_string(), FwAny))
        },
        &LitBool(b) => Constant::new(ConstantBool(b)),
    }
}
/// create `Some(ConstantVec(..))` of all constants, unless there is any
/// non-constant part
fn constant_vec<E: Deref<Target=Expr> + Sized>(cx: &Context, vec: &[E]) -> Option<Constant> {
    let mut needed_resolution = false;
    let mut folded = Vec::with_capacity(vec.len());
    for expr in vec {
        // bail out on the first element that does not fold
        let part = match constant(cx, expr) {
            Some(part) => part,
            None => return None,
        };
        needed_resolution = needed_resolution || part.needed_resolution;
        folded.push(part);
    }
    Some(Constant {
        constant: ConstantVec(folded),
        needed_resolution: needed_resolution,
    })
}
/// create `Some(ConstantTuple(..))` of all constants, unless any tuple
/// element fails to fold
fn constant_tup<E: Deref<Target=Expr> + Sized>(cx: &Context, tup: &[E]) -> Option<Constant> {
    let mut needed_resolution = false;
    let mut folded = Vec::with_capacity(tup.len());
    for expr in tup {
        // bail out on the first element that does not fold
        let part = match constant(cx, expr) {
            Some(part) => part,
            None => return None,
        };
        needed_resolution = needed_resolution || part.needed_resolution;
        folded.push(part);
    }
    Some(Constant {
        constant: ConstantTuple(folded),
        needed_resolution: needed_resolution,
    })
}
/// lookup a possibly constant expression from a ExprPath
fn fetch_path(cx: &Context, e: &Expr) -> Option<Constant> {
    // only paths resolving to a `const` item are folded; the result is
    // marked resolved so callers know a definition lookup was involved
    if let Some(&PathResolution { base_def: DefConst(id), ..}) =
            cx.tcx.def_map.borrow().get(&e.id) {
        lookup_const_by_id(cx.tcx, id, None).and_then(
            |l| constant(cx, l).map(|c| Constant::new_resolved(c.constant)))
    } else { None }
}
/// A block can only yield a constant if it only has one constant expression
fn constant_block(cx: &Context, block: &Block) -> Option<Constant> {
    // any statement before the trailing expression disqualifies the block
    if !block.stmts.is_empty() {
        return None;
    }
    block.expr.as_ref().and_then(|e| constant(cx, &*e))
}
/// fold an `if`: requires the condition to fold to a bool, then folds the
/// taken branch; a false condition without an `else` yields `None`
fn constant_if(cx: &Context, cond: &Expr, then: &Block, otherwise:
        &Option<P<Expr>>) -> Option<Constant> {
    if let Some(Constant{ constant: ConstantBool(b), needed_resolution: res }) =
            constant(cx, cond) {
        if b {
            constant_block(cx, then)
        } else {
            otherwise.as_ref().and_then(|expr| constant(cx, &*expr))
        }.map(|part|
            Constant {
                constant: part.constant,
                // resolution needed for the condition taints the result
                needed_resolution: res || part.needed_resolution,
            })
    } else { None }
}
/// fold unary `!` on a constant
///
/// ints are stored as (magnitude, signed literal type), so bitwise not is
/// expressed through the two's complement identity `!x == -(x + 1)`
fn constant_not(o: Constant) -> Option<Constant> {
    Some(Constant {
        needed_resolution: o.needed_resolution,
        constant: match o.constant {
            ConstantBool(b) => ConstantBool(!b),
            ConstantInt(value, ty) => {
                let (nvalue, nty) = match ty {
                    SignedIntLit(ity, Plus) => {
                        // !x == -(x + 1); guard the magnitude overflow
                        if value == ::std::u64::MAX { return None; }
                        (value + 1, SignedIntLit(ity, Minus))
                    },
                    SignedIntLit(ity, Minus) => {
                        // !(-x) == x - 1; !(-0) == -1
                        if value == 0 {
                            (1, SignedIntLit(ity, Minus))
                        } else {
                            (value - 1, SignedIntLit(ity, Plus))
                        }
                    }
                    UnsignedIntLit(ity) => {
                        // flip only the bits of the literal's actual width
                        let mask = match ity {
                            UintTy::TyU8 => ::std::u8::MAX as u64,
                            UintTy::TyU16 => ::std::u16::MAX as u64,
                            UintTy::TyU32 => ::std::u32::MAX as u64,
                            UintTy::TyU64 => ::std::u64::MAX,
                            UintTy::TyUs => { return None; } // refuse to guess
                        };
                        (!value & mask, UnsignedIntLit(ity))
                    }
                    UnsuffixedIntLit(_) => { return None; } // refuse to guess
                };
                ConstantInt(nvalue, nty)
            },
            _ => { return None; }
        }
    })
}
/// fold unary `-` on a constant: flip the stored sign of ints and
/// prepend/strip a `-` on float source strings
fn constant_negate(o: Constant) -> Option<Constant> {
    Some(Constant{
        needed_resolution: o.needed_resolution,
        constant: match o.constant {
            ConstantInt(value, ty) =>
                ConstantInt(value, match ty {
                    SignedIntLit(ity, sign) =>
                        SignedIntLit(ity, neg_sign(sign)),
                    UnsuffixedIntLit(sign) => UnsuffixedIntLit(neg_sign(sign)),
                    // negating an unsigned literal is not folded
                    _ => { return None; },
                }),
            ConstantFloat(is, ty) =>
                ConstantFloat(neg_float_str(is), ty),
            _ => { return None; },
        }
    })
}
/// flip the sign of an integer literal
fn neg_sign(s: Sign) -> Sign {
    if let Sign::Plus = s {
        Sign::Minus
    } else {
        Sign::Plus
    }
}
/// negate a float's textual representation by toggling its leading `-`
fn neg_float_str(s: String) -> String {
    match s.strip_prefix('-') {
        Some(rest) => rest.to_owned(),
        None => format!("-{}", s),
    }
}
/// is the given LitIntType negative?
///
/// Examples
///
/// ```
/// assert!(is_negative(UnsuffixedIntLit(Minus)));
/// ```
// NOTE: a `Minus` literal with zero magnitude still reports negative here;
// callers compensate by also checking the value (see `eq`/`partial_cmp`)
pub fn is_negative(ty: LitIntType) -> bool {
    match ty {
        SignedIntLit(_, sign) | UnsuffixedIntLit(sign) => sign == Minus,
        UnsignedIntLit(_) => false,
    }
}
/// unify the literal types of two int operands and attach the result sign
/// `s`; returns `None` for combinations that cannot be folded
fn unify_int_type(l: LitIntType, r: LitIntType, s: Sign) -> Option<LitIntType> {
    match (l, r) {
        (SignedIntLit(lty, _), SignedIntLit(rty, _)) => if lty == rty {
            Some(SignedIntLit(lty, s)) } else { None },
        // unsigned results cannot carry a negative sign
        (UnsignedIntLit(lty), UnsignedIntLit(rty)) =>
            if s == Plus && lty == rty {
                Some(UnsignedIntLit(lty))
            } else { None },
        (UnsuffixedIntLit(_), UnsuffixedIntLit(_)) => Some(UnsuffixedIntLit(s)),
        // an unsuffixed operand adopts the suffixed side's type
        (SignedIntLit(lty, _), UnsuffixedIntLit(_)) => Some(SignedIntLit(lty, s)),
        (UnsignedIntLit(lty), UnsuffixedIntLit(rs)) => if rs == Plus {
            Some(UnsignedIntLit(lty)) } else { None },
        (UnsuffixedIntLit(_), SignedIntLit(rty, _)) => Some(SignedIntLit(rty, s)),
        (UnsuffixedIntLit(ls), UnsignedIntLit(rty)) => if ls == Plus {
            Some(UnsignedIntLit(rty)) } else { None },
        // mixed signed/unsigned suffixes do not unify
        _ => None,
    }
}
/// fold a binary operation whose operands both fold to constants
fn constant_binop(cx: &Context, op: BinOp, left: &Expr, right: &Expr)
        -> Option<Constant> {
    match op.node {
        BiAdd => constant_binop_apply(cx, left, right, |l, r|
            match (l, r) {
                (ConstantByte(l8), ConstantByte(r8)) =>
                    l8.checked_add(r8).map(ConstantByte),
                (ConstantInt(l64, lty), ConstantInt(r64, rty)) => {
                    let (ln, rn) = (is_negative(lty), is_negative(rty));
                    if ln == rn {
                        // same sign: add magnitudes, keep that sign
                        unify_int_type(lty, rty, if ln { Minus } else { Plus })
                            .and_then(|ty| l64.checked_add(r64).map(
                                |v| ConstantInt(v, ty)))
                    } else {
                        // mixed signs: reduce to magnitude subtraction
                        if ln {
                            add_neg_int(r64, rty, l64, lty)
                        } else {
                            add_neg_int(l64, lty, r64, rty)
                        }
                    }
                },
                // TODO: float (would need bignum library?)
                _ => None
            }),
        BiSub => constant_binop_apply(cx, left, right, |l, r|
            match (l, r) {
                (ConstantByte(l8), ConstantByte(r8)) => if r8 > l8 {
                    None } else { Some(ConstantByte(l8 - r8)) },
                (ConstantInt(l64, lty), ConstantInt(r64, rty)) => {
                    let (ln, rn) = (is_negative(lty), is_negative(rty));
                    match (ln, rn) {
                        (false, false) => sub_int(l64, lty, r64, rty, r64 > l64),
                        (true, true) => sub_int(l64, lty, r64, rty, l64 > r64),
                        // x - (-y) and (-x) - y become magnitude additions
                        (true, false) => unify_int_type(lty, rty, Minus)
                            .and_then(|ty| l64.checked_add(r64).map(
                                |v| ConstantInt(v, ty))),
                        (false, true) => unify_int_type(lty, rty, Plus)
                            .and_then(|ty| l64.checked_add(r64).map(
                                |v| ConstantInt(v, ty))),
                    }
                },
                _ => None,
            }),
        //BiMul,
        //BiDiv,
        //BiRem,
        BiAnd => constant_short_circuit(cx, left, right, false),
        BiOr => constant_short_circuit(cx, left, right, true),
        BiBitXor => constant_bitop(cx, left, right, |x, y| x ^ y),
        BiBitAnd => constant_bitop(cx, left, right, |x, y| x & y),
        BiBitOr => constant_bitop(cx, left, right, |x, y| (x | y)),
        BiShl => constant_bitop(cx, left, right, |x, y| x << y),
        BiShr => constant_bitop(cx, left, right, |x, y| x >> y),
        BiEq => constant_binop_apply(cx, left, right,
            |l, r| Some(ConstantBool(l == r))),
        BiNe => constant_binop_apply(cx, left, right,
            |l, r| Some(ConstantBool(l != r))),
        // comparisons: (expected ordering, whether a match means `true`)
        BiLt => constant_cmp(cx, left, right, Less, true),
        BiLe => constant_cmp(cx, left, right, Greater, false),
        BiGe => constant_cmp(cx, left, right, Less, false),
        BiGt => constant_cmp(cx, left, right, Greater, true),
        _ => None
    }
}
/// fold a bitwise operation `f` over two constants; bools are treated as
/// one-bit ints, bytes stay within u8 range
fn constant_bitop<F>(cx: &Context, left: &Expr, right: &Expr, f: F)
        -> Option<Constant> where F: Fn(u64, u64) -> u64 {
    constant_binop_apply(cx, left, right, |l, r| match (l, r) {
        (ConstantBool(l), ConstantBool(r)) =>
            Some(ConstantBool(f(l as u64, r as u64) != 0)),
        (ConstantByte(l8), ConstantByte(r8)) =>
            Some(ConstantByte(f(l8 as u64, r8 as u64) as u8)),
        (ConstantInt(l, lty), ConstantInt(r, rty)) =>
            unify_int_type(lty, rty, Plus).map(|ty| ConstantInt(f(l, r), ty)),
        _ => None
    })
}
/// fold a comparison: fold both sides, compare them, then report whether
/// the resulting `Ordering` matching `ordering` means `true` (`b`)
fn constant_cmp(cx: &Context, left: &Expr, right: &Expr, ordering: Ordering,
        b: bool) -> Option<Constant> {
    constant_binop_apply(cx, left, right, |l, r| {
        let ord = l.partial_cmp(&r);
        ord.map(|o| ConstantBool((o == ordering) == b))
    })
}
/// add a non-negative int `pos` and a negative int `neg`, both given as
/// magnitudes; the result's sign follows whichever magnitude is larger
fn add_neg_int(pos: u64, pty: LitIntType, neg: u64, nty: LitIntType) ->
        Option<ConstantVariant> {
    if neg > pos {
        unify_int_type(nty, pty, Minus).map(|ty| ConstantInt(neg - pos, ty))
    } else {
        unify_int_type(nty, pty, Plus).map(|ty| ConstantInt(pos - neg, ty))
    }
}
/// subtract two same-sign ints given as magnitudes; `neg` marks whether the
/// mathematical result would be negative
// NOTE(review): when `neg` is true the magnitudes are not swapped, so
// `checked_sub` yields `None` and those differences are simply not folded —
// confirm this is the intended (conservative) behavior.
fn sub_int(l: u64, lty: LitIntType, r: u64, rty: LitIntType, neg: bool) ->
        Option<ConstantVariant> {
    unify_int_type(lty, rty, if neg { Minus } else { Plus }).and_then(
        |ty| l.checked_sub(r).map(|v| ConstantInt(v, ty)))
}
/// fold both operand expressions, then combine the resulting variants with
/// `op`; resolution of either side taints the result
fn constant_binop_apply<F>(cx: &Context, left: &Expr, right: &Expr, op: F)
        -> Option<Constant>
        where F: Fn(ConstantVariant, ConstantVariant) -> Option<ConstantVariant> {
    match (constant(cx, left), constant(cx, right)) {
        (Some(lhs), Some(rhs)) => {
            let resolution = lhs.needed_resolution || rhs.needed_resolution;
            op(lhs.constant, rhs.constant).map(|variant| Constant {
                needed_resolution: resolution,
                constant: variant,
            })
        }
        _ => None,
    }
}
/// fold `&&`/`||`: `b` is the short-circuit value (`false` for `&&`,
/// `true` for `||`); if the left side equals `b` the right side is ignored
fn constant_short_circuit(cx: &Context, left: &Expr, right: &Expr, b: bool) ->
        Option<Constant> {
    constant(cx, left).and_then(|left|
        if let &ConstantBool(lbool) = &left.constant {
            if lbool == b {
                // short circuit: the overall result is the left constant
                Some(left)
            } else {
                // otherwise the overall value is the right side's value
                constant(cx, right).and_then(|right|
                    if let ConstantBool(_) = right.constant {
                        Some(Constant {
                            constant: right.constant,
                            needed_resolution: left.needed_resolution ||
                                right.needed_resolution,
                        })
                    } else { None }
                )
            }
        } else { None }
    )
}
|
use std::fmt;
use chrono::{DateTime, Local};
use itertools::Itertools;
use json::JsonValue;
// #[derive(Debug, Default)]
// pub struct Error {
// pub detail: String,
// }
//
// #[derive(Debug)]
// pub struct CrateLinks {
// pub owners: Option<String>,
// pub reverse_dependencies: String,
// pub version_downloads: String,
// pub versions: Option<String>,
// }
//
// pub struct Crate {
// pub created_at: String,
// pub description: Option<String>,
// pub documentation: Option<String>,
// pub downloads: i32,
// pub homepage: Option<String>,
// pub id: String,
// pub keywords: Option<Vec<String>>,
// pub license: Option<String>,
// pub links: CrateLinks,
// pub max_version: String,
// pub name: String,
// pub repository: Option<String>,
// pub updated_at: String,
// pub versions: Option<Vec<u64>>,
// }
//
// #[derive(Debug)]
// pub struct Keyword {
// pub crates_cnt: u64,
// pub created_at: String,
// pub id: String,
// pub keyword: String,
// }
//
// #[derive(Debug)]
// pub struct VersionLinks {
// pub authors: String,
// pub dependencies: String,
// pub version_downloads: String,
// }
//
// #[derive(Debug)]
// pub struct Version {
// pub krate: String,
// pub created_at: String,
// pub dl_path: String,
// pub downloads: i32,
// pub features: HashMap<String, Vec<String>>,
// pub id: i32,
// pub links: VersionLinks,
// pub num: String,
// pub updated_at: String,
// pub yanked: bool,
// }
//
// pub struct Reply {
// pub errors: Error,
// pub krate: Crate,
// pub keywords: Vec<Keyword>,
// pub versions: Vec<Version>,
// }
/// a crate timestamp that may be absent or unparsable (`None`)
struct TimeStamp(Option<DateTime<Local>>);
impl<'a> From<&'a JsonValue> for TimeStamp {
    /// parse a datetime out of a JSON string value; non-strings and
    /// unparsable dates become `TimeStamp(None)`
    fn from(jv: &JsonValue) -> Self {
        TimeStamp(jv.as_str().and_then(|s| s.parse::<DateTime<Local>>().ok()))
    }
}
impl fmt::Display for TimeStamp {
    /// pad the rendered timestamp into `f`; an absent timestamp renders
    /// as the empty string (still honoring width/padding)
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.0 {
            Some(ts) => f.pad(&ts.to_string()),
            None => f.pad(""),
        }
    }
}
/// the parts of the crates.io API reply that this tool renders
pub struct Crate {
    krate: JsonValue,    // the `crate` object of the reply
    versions: JsonValue, // the `versions` array
    keywords: JsonValue, // the `keywords` array
}
impl Crate {
    /// build a `Crate` view from the JSON reply, keeping the `crate`,
    /// `versions` and `keywords` sub-objects
    pub fn new(json: &JsonValue) -> Self {
        Crate {
            krate: json["crate"].clone(),
            versions: json["versions"].clone(),
            keywords: json["keywords"].clone(),
        }
    }
    /// shared printer for string-valued crate fields: label-prefixed when
    /// `verbose`, bare value otherwise; silent when the field is not a string
    fn print_str_field(&self, key: &str, label: &str, verbose: bool) {
        if let JsonValue::String(ref value) = self.krate[key] {
            let fmt = if verbose {
                format!("{:<16}{}", label, value)
            } else {
                value.clone()
            };
            println!("{}", fmt);
        }
    }
    /// print the repository URL, if present
    pub fn print_repository(&self, verbose: bool) {
        self.print_str_field("repository", "Repository:", verbose);
    }
    /// print the documentation URL, if present
    pub fn print_documentation(&self, verbose: bool) {
        self.print_str_field("documentation", "Documentation:", verbose);
    }
    /// print the download count, if present
    pub fn print_downloads(&self, verbose: bool) {
        if let JsonValue::Number(downloads) = self.krate["downloads"] {
            let fmt = if verbose {
                format!("{:<16}{}", "Downloads:", downloads)
            } else {
                format!("{}", downloads)
            };
            println!("{}", fmt);
        }
    }
    /// print the homepage URL, if present
    pub fn print_homepage(&self, verbose: bool) {
        self.print_str_field("homepage", "Homepage:", verbose);
    }
    /// print one version row: number, release date, downloads
    fn print_version(v: &JsonValue, verbose: bool) {
        let created_at = TimeStamp::from(&v["created_at"]);
        print!("{:<10}{:<28}{:<11}", v["num"], created_at, v["downloads"]);
        // NOTE(review): crates.io sends `yanked` as a JSON boolean, so
        // comparing against the *string* "true" probably never matches —
        // confirm and switch to `.as_bool()` if so.
        if v["yanked"] == "true" {
            print!("(yanked)");
        }
        // both verbose branches printed the same empty line — collapsed;
        // consider adding some more useful information in verbose mode
        let _ = verbose;
        println!();
    }
    /// print the table header for the version listing
    fn print_version_header(verbose: bool) {
        println!("{:<10}{:<28}{:<11}", "VERSION", "RELEASE DATE", "DOWNLOADS");
        // both verbose branches printed the same empty line — collapsed
        let _ = verbose;
        println!();
    }
    /// print up to `limit` versions in API order, plus a hint when more
    /// versions are available
    pub fn print_last_versions(&self, limit: usize, verbose: bool) {
        Crate::print_version_header(verbose);
        // plain `for` instead of itertools' `foreach`
        for v in self.versions.members().take(limit) {
            Crate::print_version(v, verbose);
        }
        let length = self.versions.len();
        if limit < length {
            println!("\n... use -VV to show all {} versions", length);
        }
    }
    /// print the keywords array; pretty-printed in verbose mode
    pub fn print_keywords(&self, verbose: bool) {
        let fmt = if verbose {
            format!("{:#}", self.keywords)
        } else {
            format!("{}", self.keywords)
        };
        println!("{}", fmt);
    }
}
impl fmt::Display for Crate {
    /// `{}` prints a short summary; the alternate form `{:#}` additionally
    /// includes license, keywords and timestamps
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // missing string fields fall back to the empty string
        let empty = "";
        let name = self.krate["name"].as_str().unwrap_or(empty);
        let max_version = self.krate["max_version"].as_str().unwrap_or(empty);
        let downloads = self.krate["downloads"].as_i32().unwrap_or(0);
        let created_at = TimeStamp::from(&self.krate["created_at"]);
        let updated_at = TimeStamp::from(&self.krate["updated_at"]);
        let description = self.krate["description"].as_str().unwrap_or(empty);
        let documentation = self.krate["documentation"].as_str().unwrap_or(empty);
        let homepage = self.krate["homepage"].as_str().unwrap_or(empty);
        let repository = self.krate["repository"].as_str().unwrap_or(empty);
        let license = self.krate["license"].as_str().unwrap_or(empty);
        let keywords = self.krate["keywords"]
            .members()
            .filter_map(|jv| jv.as_str())
            .collect::<Vec<_>>();
        if f.alternate() {
            write!(f,
                   "{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}",
                   format_args!("{:<16}{}", "Crate:", name),
                   format_args!("{:<16}{}", "Version:", max_version),
                   format_args!("{:<16}{}", "Description:", description),
                   format_args!("{:<16}{}", "Downloads:", downloads),
                   format_args!("{:<16}{}", "Homepage:", homepage),
                   format_args!("{:<16}{}", "Documentation:", documentation),
                   format_args!("{:<16}{}", "Repository:", repository),
                   format_args!("{:<16}{}", "License:", license),
                   format_args!("{:<16}{:?}", "Keywords:", keywords),
                   format_args!("{:<16}{}", "Created at:", created_at),
                   format_args!("{:<16}{}", "Updated at:", updated_at))
        } else {
            write!(f,
                   "{}\n{}\n{}\n{}\n{}\n{}\n{}",
                   format_args!("{:<16}{}", "Crate:", name),
                   format_args!("{:<16}{}", "Version:", max_version),
                   format_args!("{:<16}{}", "Description:", description),
                   format_args!("{:<16}{}", "Downloads:", downloads),
                   format_args!("{:<16}{}", "Homepage:", homepage),
                   format_args!("{:<16}{}", "Documentation:", documentation),
                   format_args!("{:<16}{}", "Repository:", repository))
        }
    }
}
rustfmt
use std::fmt;
use chrono::{DateTime, Local};
use itertools::Itertools;
use json::JsonValue;
// #[derive(Debug, Default)]
// pub struct Error {
// pub detail: String,
// }
//
// #[derive(Debug)]
// pub struct CrateLinks {
// pub owners: Option<String>,
// pub reverse_dependencies: String,
// pub version_downloads: String,
// pub versions: Option<String>,
// }
//
// pub struct Crate {
// pub created_at: String,
// pub description: Option<String>,
// pub documentation: Option<String>,
// pub downloads: i32,
// pub homepage: Option<String>,
// pub id: String,
// pub keywords: Option<Vec<String>>,
// pub license: Option<String>,
// pub links: CrateLinks,
// pub max_version: String,
// pub name: String,
// pub repository: Option<String>,
// pub updated_at: String,
// pub versions: Option<Vec<u64>>,
// }
//
// #[derive(Debug)]
// pub struct Keyword {
// pub crates_cnt: u64,
// pub created_at: String,
// pub id: String,
// pub keyword: String,
// }
//
// #[derive(Debug)]
// pub struct VersionLinks {
// pub authors: String,
// pub dependencies: String,
// pub version_downloads: String,
// }
//
// #[derive(Debug)]
// pub struct Version {
// pub krate: String,
// pub created_at: String,
// pub dl_path: String,
// pub downloads: i32,
// pub features: HashMap<String, Vec<String>>,
// pub id: i32,
// pub links: VersionLinks,
// pub num: String,
// pub updated_at: String,
// pub yanked: bool,
// }
//
// pub struct Reply {
// pub errors: Error,
// pub krate: Crate,
// pub keywords: Vec<Keyword>,
// pub versions: Vec<Version>,
// }
/// a crate timestamp that may be absent or unparsable (`None`)
struct TimeStamp(Option<DateTime<Local>>);
impl<'a> From<&'a JsonValue> for TimeStamp {
    /// parse a datetime out of a JSON string value; non-strings and
    /// unparsable dates become `TimeStamp(None)`
    fn from(jv: &JsonValue) -> Self {
        TimeStamp(jv.as_str().and_then(|s| s.parse::<DateTime<Local>>().ok()))
    }
}
impl fmt::Display for TimeStamp {
    /// pad the rendered timestamp into `f`; an absent timestamp renders
    /// as the empty string (still honoring width/padding)
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.0 {
            Some(ts) => f.pad(&ts.to_string()),
            None => f.pad(""),
        }
    }
}
/// the parts of the crates.io API reply that this tool renders
pub struct Crate {
    krate: JsonValue,    // the `crate` object of the reply
    versions: JsonValue, // the `versions` array
    keywords: JsonValue, // the `keywords` array
}
impl Crate {
    /// build a `Crate` view from the JSON reply, keeping the `crate`,
    /// `versions` and `keywords` sub-objects
    pub fn new(json: &JsonValue) -> Self {
        Crate {
            krate: json["crate"].clone(),
            versions: json["versions"].clone(),
            keywords: json["keywords"].clone(),
        }
    }
    /// shared printer for string-valued crate fields: label-prefixed when
    /// `verbose`, bare value otherwise; silent when the field is not a string
    fn print_str_field(&self, key: &str, label: &str, verbose: bool) {
        if let JsonValue::String(ref value) = self.krate[key] {
            let fmt = if verbose {
                format!("{:<16}{}", label, value)
            } else {
                value.clone()
            };
            println!("{}", fmt);
        }
    }
    /// print the repository URL, if present
    pub fn print_repository(&self, verbose: bool) {
        self.print_str_field("repository", "Repository:", verbose);
    }
    /// print the documentation URL, if present
    pub fn print_documentation(&self, verbose: bool) {
        self.print_str_field("documentation", "Documentation:", verbose);
    }
    /// print the download count, if present
    pub fn print_downloads(&self, verbose: bool) {
        if let JsonValue::Number(downloads) = self.krate["downloads"] {
            let fmt = if verbose {
                format!("{:<16}{}", "Downloads:", downloads)
            } else {
                format!("{}", downloads)
            };
            println!("{}", fmt);
        }
    }
    /// print the homepage URL, if present
    pub fn print_homepage(&self, verbose: bool) {
        self.print_str_field("homepage", "Homepage:", verbose);
    }
    /// print one version row: number, release date, downloads
    fn print_version(v: &JsonValue, verbose: bool) {
        let created_at = TimeStamp::from(&v["created_at"]);
        print!("{:<10}{:<28}{:<11}", v["num"], created_at, v["downloads"]);
        // NOTE(review): crates.io sends `yanked` as a JSON boolean, so
        // comparing against the *string* "true" probably never matches —
        // confirm and switch to `.as_bool()` if so.
        if v["yanked"] == "true" {
            print!("(yanked)");
        }
        // both verbose branches printed the same empty line — collapsed;
        // consider adding some more useful information in verbose mode
        let _ = verbose;
        println!();
    }
    /// print the table header for the version listing
    fn print_version_header(verbose: bool) {
        println!("{:<10}{:<28}{:<11}", "VERSION", "RELEASE DATE", "DOWNLOADS");
        // both verbose branches printed the same empty line — collapsed
        let _ = verbose;
        println!();
    }
    /// print up to `limit` versions in API order, plus a hint when more
    /// versions are available
    pub fn print_last_versions(&self, limit: usize, verbose: bool) {
        Crate::print_version_header(verbose);
        // plain `for` instead of itertools' `foreach`
        for v in self.versions.members().take(limit) {
            Crate::print_version(v, verbose);
        }
        let length = self.versions.len();
        if limit < length {
            println!("\n... use -VV to show all {} versions", length);
        }
    }
    /// print the keywords array; pretty-printed in verbose mode
    pub fn print_keywords(&self, verbose: bool) {
        let fmt = if verbose {
            format!("{:#}", self.keywords)
        } else {
            format!("{}", self.keywords)
        };
        println!("{}", fmt);
    }
}
impl fmt::Display for Crate {
    /// `{}` prints a short summary; the alternate form `{:#}` additionally
    /// includes license, keywords and timestamps
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // missing string fields fall back to the empty string
        let empty = "";
        let name = self.krate["name"].as_str().unwrap_or(empty);
        let max_version = self.krate["max_version"].as_str().unwrap_or(empty);
        let downloads = self.krate["downloads"].as_i32().unwrap_or(0);
        let created_at = TimeStamp::from(&self.krate["created_at"]);
        let updated_at = TimeStamp::from(&self.krate["updated_at"]);
        let description = self.krate["description"].as_str().unwrap_or(empty);
        let documentation = self.krate["documentation"].as_str().unwrap_or(empty);
        let homepage = self.krate["homepage"].as_str().unwrap_or(empty);
        let repository = self.krate["repository"].as_str().unwrap_or(empty);
        let license = self.krate["license"].as_str().unwrap_or(empty);
        let keywords = self.krate["keywords"]
            .members()
            .filter_map(|jv| jv.as_str())
            .collect::<Vec<_>>();
        if f.alternate() {
            write!(f,
                   "{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}",
                   format_args!("{:<16}{}", "Crate:", name),
                   format_args!("{:<16}{}", "Version:", max_version),
                   format_args!("{:<16}{}", "Description:", description),
                   format_args!("{:<16}{}", "Downloads:", downloads),
                   format_args!("{:<16}{}", "Homepage:", homepage),
                   format_args!("{:<16}{}", "Documentation:", documentation),
                   format_args!("{:<16}{}", "Repository:", repository),
                   format_args!("{:<16}{}", "License:", license),
                   format_args!("{:<16}{:?}", "Keywords:", keywords),
                   format_args!("{:<16}{}", "Created at:", created_at),
                   format_args!("{:<16}{}", "Updated at:", updated_at))
        } else {
            write!(f,
                   "{}\n{}\n{}\n{}\n{}\n{}\n{}",
                   format_args!("{:<16}{}", "Crate:", name),
                   format_args!("{:<16}{}", "Version:", max_version),
                   format_args!("{:<16}{}", "Description:", description),
                   format_args!("{:<16}{}", "Downloads:", downloads),
                   format_args!("{:<16}{}", "Homepage:", homepage),
                   format_args!("{:<16}{}", "Documentation:", documentation),
                   format_args!("{:<16}{}", "Repository:", repository))
        }
    }
}
|
use std::fmt;
use chrono::{DateTime, Local};
use chrono_humanize::HumanTime;
use json::JsonValue;
// #[derive(Debug, Default)]
// pub struct Error {
// pub detail: String,
// }
//
// #[derive(Debug)]
// pub struct CrateLinks {
// pub owners: Option<String>,
// pub reverse_dependencies: String,
// pub version_downloads: String,
// pub versions: Option<String>,
// }
//
// pub struct Crate {
// pub created_at: String,
// pub description: Option<String>,
// pub documentation: Option<String>,
// pub downloads: i32,
// pub homepage: Option<String>,
// pub id: String,
// pub keywords: Option<Vec<String>>,
// pub license: Option<String>,
// pub links: CrateLinks,
// pub max_version: String,
// pub name: String,
// pub repository: Option<String>,
// pub updated_at: String,
// pub versions: Option<Vec<u64>>,
// }
//
// #[derive(Debug)]
// pub struct Keyword {
// pub crates_cnt: u64,
// pub created_at: String,
// pub id: String,
// pub keyword: String,
// }
//
// #[derive(Debug)]
// pub struct VersionLinks {
// pub authors: String,
// pub dependencies: String,
// pub version_downloads: String,
// }
//
// #[derive(Debug)]
// pub struct Version {
// pub krate: String,
// pub created_at: String,
// pub dl_path: String,
// pub downloads: i32,
// pub features: HashMap<String, Vec<String>>,
// pub id: i32,
// pub links: VersionLinks,
// pub num: String,
// pub updated_at: String,
// pub yanked: bool,
// }
//
// pub struct Reply {
// pub errors: Error,
// pub krate: Crate,
// pub keywords: Vec<Keyword>,
// pub versions: Vec<Version>,
// }
/// a crate timestamp that may be absent or unparsable (`None`)
struct TimeStamp(Option<DateTime<Local>>);
impl<'a> From<&'a JsonValue> for TimeStamp {
    /// parse a datetime out of a JSON string value; non-strings and
    /// unparsable dates become `TimeStamp(None)`
    fn from(jv: &JsonValue) -> Self {
        TimeStamp(jv.as_str().and_then(|s| s.parse::<DateTime<Local>>().ok()))
    }
}
impl fmt::Display for TimeStamp {
    /// `{}` renders the local naive datetime; the alternate form `{:#}`
    /// renders a humanized relative time via `chrono_humanize::HumanTime`
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if let Some(ts) = self.0 {
            if f.alternate() {
                f.pad(&format!("{}", HumanTime::from(ts)))
            } else {
                f.pad(&format!("{}", ts.naive_local()))
            }
        } else {
            // absent timestamps render empty, still honoring padding
            f.pad("")
        }
    }
}
/// the parts of the crates.io API reply that this tool renders
pub struct Crate {
    krate: JsonValue,    // the `crate` object of the reply
    versions: JsonValue, // the `versions` array
    keywords: JsonValue, // the `keywords` array
}
/// crate fields the API may omit; `Crate::new` backfills them with ""
const FIELDS: [&'static str; 5] =
    ["description", "documentation", "homepage", "repository", "license"];
impl Crate {
    /// build a `Crate` view from the crates.io JSON reply, replacing
    /// absent (null) optional fields with empty strings so that later
    /// formatting is uniform
    pub fn new(json: &JsonValue) -> Self {
        let mut krate = json["crate"].clone();
        // Fix up fields that may be absent
        for field in &FIELDS {
            if krate[*field].is_null() {
                krate[*field] = "".into();
            }
        }
        Crate {
            krate: krate,
            versions: json["versions"].clone(),
            keywords: json["keywords"].clone(),
        }
    }
    /// shared formatter for crate fields: label-prefixed when `verbose`,
    /// bare value otherwise
    fn format_field(&self, key: &str, label: &str, verbose: bool) -> String {
        if verbose {
            format!("{:<16}{}", label, self.krate[key])
        } else {
            format!("{}", self.krate[key])
        }
    }
    /// format the repository URL
    pub fn print_repository(&self, verbose: bool) -> String {
        self.format_field("repository", "Repository:", verbose)
    }
    /// format the documentation URL
    pub fn print_documentation(&self, verbose: bool) -> String {
        self.format_field("documentation", "Documentation:", verbose)
    }
    /// format the download count
    pub fn print_downloads(&self, verbose: bool) -> String {
        self.format_field("downloads", "Downloads:", verbose)
    }
    /// format the homepage URL
    pub fn print_homepage(&self, verbose: bool) -> String {
        self.format_field("homepage", "Homepage:", verbose)
    }
    /// format one version row: number, humanized release date, downloads
    fn print_version(v: &JsonValue, verbose: bool) -> String {
        let created_at = TimeStamp::from(&v["created_at"]);
        let mut output = format!("{:<11}{:<#16}{:<11}", v["num"], created_at, v["downloads"]);
        // NOTE(review): crates.io sends `yanked` as a JSON boolean, so
        // comparing against the *string* "true" probably never matches —
        // confirm and switch to `.as_bool()` if so.
        if v["yanked"] == "true" {
            output.push_str("(yanked)");
        }
        // both verbose branches appended the same newline — collapsed;
        // consider adding some more useful information in verbose mode
        let _ = verbose;
        output.push('\n');
        output
    }
    /// format the table header for the version listing
    fn print_version_header(verbose: bool) -> String {
        let mut output = format!("{:<11}{:<#16}{:<11}\n", "VERSION", "RELEASED", "DOWNLOADS");
        // both verbose branches appended the same newline — collapsed
        let _ = verbose;
        output.push('\n');
        output
    }
    /// format up to `limit` versions in API order, plus a hint when more
    /// versions are available; builds in place instead of `output + &s`
    pub fn print_last_versions(&self, limit: usize, verbose: bool) -> String {
        let mut output = Crate::print_version_header(verbose);
        for version in self.versions.members().take(limit) {
            output.push_str(&Crate::print_version(version, verbose));
        }
        let length = self.versions.len();
        if limit < length {
            output.push_str(&format!("\n... use -VV to show all {} versions\n", length));
        }
        output
    }
    /// format the keywords array; pretty-printed in verbose mode
    pub fn print_keywords(&self, verbose: bool) -> String {
        if verbose {
            format!("{:#}", self.keywords)
        } else {
            format!("{}", self.keywords)
        }
    }
}
impl fmt::Display for Crate {
    /// `{}` prints a short summary with a humanized "Last updated"; the
    /// alternate form `{:#}` adds license, keywords and both timestamps
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let created_at = TimeStamp::from(&self.krate["created_at"]);
        let updated_at = TimeStamp::from(&self.krate["updated_at"]);
        let keywords = self.krate["keywords"]
            .members()
            .filter_map(|jv| jv.as_str())
            .collect::<Vec<_>>();
        if f.alternate() {
            write!(f,
                   "{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}",
                   format_args!("{:<16}{}", "Crate:", self.krate["name"]),
                   format_args!("{:<16}{}", "Version:", self.krate["max_version"]),
                   format_args!("{:<16}{}", "Description:", self.krate["description"]),
                   format_args!("{:<16}{}", "Downloads:", self.krate["downloads"]),
                   format_args!("{:<16}{}", "Homepage:", self.krate["homepage"]),
                   format_args!("{:<16}{}", "Documentation:", self.krate["documentation"]),
                   format_args!("{:<16}{}", "Repository:", self.krate["repository"]),
                   format_args!("{:<16}{}", "License:", self.krate["license"]),
                   format_args!("{:<16}{:?}", "Keywords:", keywords),
                   // `{:#}` on TimeStamp yields the humanized form
                   format_args!("{:<16}{} ({:#})", "Created at:", created_at, created_at),
                   format_args!("{:<16}{} ({:#})", "Updated at:", updated_at, updated_at))
        } else {
            write!(f,
                   "{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}",
                   format_args!("{:<16}{}", "Crate:", self.krate["name"]),
                   format_args!("{:<16}{}", "Version:", self.krate["max_version"]),
                   format_args!("{:<16}{}", "Description:", self.krate["description"]),
                   format_args!("{:<16}{}", "Downloads:", self.krate["downloads"]),
                   format_args!("{:<16}{}", "Homepage:", self.krate["homepage"]),
                   format_args!("{:<16}{}", "Documentation:", self.krate["documentation"]),
                   format_args!("{:<16}{}", "Repository:", self.krate["repository"]),
                   format_args!("{:<16}{:#}", "Last updated:", updated_at))
        }
    }
}
Add brief version history in the default output
Closes #13
use std::fmt;
use chrono::{DateTime, Local};
use chrono_humanize::HumanTime;
use json::JsonValue;
// #[derive(Debug, Default)]
// pub struct Error {
// pub detail: String,
// }
//
// #[derive(Debug)]
// pub struct CrateLinks {
// pub owners: Option<String>,
// pub reverse_dependencies: String,
// pub version_downloads: String,
// pub versions: Option<String>,
// }
//
// pub struct Crate {
// pub created_at: String,
// pub description: Option<String>,
// pub documentation: Option<String>,
// pub downloads: i32,
// pub homepage: Option<String>,
// pub id: String,
// pub keywords: Option<Vec<String>>,
// pub license: Option<String>,
// pub links: CrateLinks,
// pub max_version: String,
// pub name: String,
// pub repository: Option<String>,
// pub updated_at: String,
// pub versions: Option<Vec<u64>>,
// }
//
// #[derive(Debug)]
// pub struct Keyword {
// pub crates_cnt: u64,
// pub created_at: String,
// pub id: String,
// pub keyword: String,
// }
//
// #[derive(Debug)]
// pub struct VersionLinks {
// pub authors: String,
// pub dependencies: String,
// pub version_downloads: String,
// }
//
// #[derive(Debug)]
// pub struct Version {
// pub krate: String,
// pub created_at: String,
// pub dl_path: String,
// pub downloads: i32,
// pub features: HashMap<String, Vec<String>>,
// pub id: i32,
// pub links: VersionLinks,
// pub num: String,
// pub updated_at: String,
// pub yanked: bool,
// }
//
// pub struct Reply {
// pub errors: Error,
// pub krate: Crate,
// pub keywords: Vec<Keyword>,
// pub versions: Vec<Version>,
// }
/// Wrapper around an optional timestamp parsed from a JSON string field.
/// `None` means the field was absent, not a string, or failed to parse.
struct TimeStamp(Option<DateTime<Local>>);
/// Converts a JSON value into a `TimeStamp`: non-string values and
/// unparsable strings both yield an empty timestamp.
impl<'a> From<&'a JsonValue> for TimeStamp {
    fn from(jv: &JsonValue) -> Self {
        match jv.as_str() {
            Some(raw) => TimeStamp(raw.parse::<DateTime<Local>>().ok()),
            None => TimeStamp(None),
        }
    }
}
impl fmt::Display for TimeStamp {
    /// `{}` prints the naive local time, `{:#}` a humanized relative form;
    /// an empty timestamp prints as "". Rendering goes through `f.pad` so
    /// callers' width/alignment flags (e.g. `{:<16}`) still apply.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let rendered = match self.0 {
            Some(ts) if f.alternate() => format!("{}", HumanTime::from(ts)),
            Some(ts) => format!("{}", ts.naive_local()),
            None => String::new(),
        };
        f.pad(&rendered)
    }
}
/// In-memory view of a crates.io API reply for a single crate.
pub struct Crate {
    // The "crate" object from the reply; optional string fields that were
    // JSON null are normalized to "" by `Crate::new`.
    krate: JsonValue,
    // The "versions" array from the reply. Presumably ordered newest
    // first, since the first `limit` entries are printed as the "last"
    // versions — TODO confirm against the API.
    versions: JsonValue,
    // The "keywords" array from the reply, printed verbatim as JSON.
    keywords: JsonValue,
}
// Optional string fields of the API's "crate" object that `Crate::new`
// replaces with "" when they come back as JSON null.
const FIELDS: [&'static str; 5] =
["description", "documentation", "homepage", "repository", "license"];
impl Crate {
    /// Builds a `Crate` view from a full crates.io API reply, cloning the
    /// "crate", "versions" and "keywords" sub-objects.
    ///
    /// Optional string fields of the crate object that came back as JSON
    /// `null` are replaced with `""` so the formatting helpers below can
    /// print them unconditionally.
    pub fn new(json: &JsonValue) -> Self {
        let mut krate = json["crate"].clone();
        // Fix up fields that may be absent.
        for field in &FIELDS {
            if krate[*field].is_null() {
                krate[*field] = "".into();
            }
        }
        Crate {
            krate,
            versions: json["versions"].clone(),
            keywords: json["keywords"].clone(),
        }
    }
    /// Formats the repository URL; verbose mode adds a left-aligned label.
    pub fn print_repository(&self, verbose: bool) -> String {
        if verbose {
            format!("{:<16}{}", "Repository:", self.krate["repository"])
        } else {
            format!("{}", self.krate["repository"])
        }
    }
    /// Formats the documentation URL; verbose mode adds a left-aligned label.
    pub fn print_documentation(&self, verbose: bool) -> String {
        if verbose {
            format!("{:<16}{}", "Documentation:", self.krate["documentation"])
        } else {
            format!("{}", self.krate["documentation"])
        }
    }
    /// Formats the download count; verbose mode adds a left-aligned label.
    pub fn print_downloads(&self, verbose: bool) -> String {
        if verbose {
            format!("{:<16}{}", "Downloads:", self.krate["downloads"])
        } else {
            format!("{}", self.krate["downloads"])
        }
    }
    /// Formats the homepage URL; verbose mode adds a left-aligned label.
    pub fn print_homepage(&self, verbose: bool) -> String {
        if verbose {
            format!("{:<16}{}", "Homepage:", self.krate["homepage"])
        } else {
            format!("{}", self.krate["homepage"])
        }
    }
    /// Formats one row of the version table: version number, release date
    /// (humanized via `TimeStamp`'s alternate form) and download count,
    /// plus a "(yanked)" marker for pulled versions.
    fn print_version(v: &JsonValue, verbose: bool) -> String {
        let created_at = TimeStamp::from(&v["created_at"]);
        let mut output = format!("{:<11}{:<#16}{:<11}", v["num"], created_at, v["downloads"]);
        // BUGFIX: the API encodes `yanked` as a JSON boolean, which never
        // compares equal to the string "true"; check the boolean first and
        // keep the string comparison for backward compatibility with
        // replies that encode it as a string.
        if v["yanked"].as_bool().unwrap_or(false) || v["yanked"] == "true" {
            output += "(yanked)";
        }
        // Both modes currently print the same row.
        // Consider adding some more useful information in verbose mode.
        let _ = verbose;
        output + "\n"
    }
    /// Formats the header row of the version table (followed by a blank
    /// line, preserved from the original two-branch implementation).
    fn print_version_header(verbose: bool) -> String {
        // Both modes currently print the same header.
        // Consider adding some more useful information in verbose mode.
        let _ = verbose;
        format!("{:<11}{:<#16}{:<11}\n\n", "VERSION", "RELEASED", "DOWNLOADS")
    }
    /// Formats the header plus up to `limit` version rows, and a hint on
    /// how to list the rest when more versions exist.
    pub fn print_last_versions(&self, limit: usize, verbose: bool) -> String {
        let mut output = Crate::print_version_header(verbose);
        for version in self.versions.members().take(limit) {
            output = output + &Crate::print_version(version, verbose);
        }
        let length = self.versions.len();
        if limit < length {
            output = output + &format!("\n... use -VV to show all {} versions\n", length);
        }
        output
    }
    /// Formats the keyword array as JSON; verbose mode uses the alternate
    /// (pretty-printed) form.
    pub fn print_keywords(&self, verbose: bool) -> String {
        if verbose {
            format!("{:#}", self.keywords)
        } else {
            format!("{}", self.keywords)
        }
    }
}
impl fmt::Display for Crate {
    /// `{}` renders the summary view (including a brief, indented version
    /// history); `{:#}` renders the verbose view with extra fields and
    /// both absolute and humanized timestamps.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let created_at = TimeStamp::from(&self.krate["created_at"]);
        let updated_at = TimeStamp::from(&self.krate["updated_at"]);
        // Collect only the string members of the keywords array.
        let keywords = self.krate["keywords"]
            .members()
            .filter_map(|jv| jv.as_str())
            .collect::<Vec<_>>();
        if f.alternate() {
            // Verbose ({:#}) output: one left-aligned 16-column label per line.
            write!(f,
                   "{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}",
                   format_args!("{:<16}{}", "Crate:", self.krate["name"]),
                   format_args!("{:<16}{}", "Version:", self.krate["max_version"]),
                   format_args!("{:<16}{}", "Description:", self.krate["description"]),
                   format_args!("{:<16}{}", "Downloads:", self.krate["downloads"]),
                   format_args!("{:<16}{}", "Homepage:", self.krate["homepage"]),
                   format_args!("{:<16}{}", "Documentation:", self.krate["documentation"]),
                   format_args!("{:<16}{}", "Repository:", self.krate["repository"]),
                   format_args!("{:<16}{}", "License:", self.krate["license"]),
                   format_args!("{:<16}{:?}", "Keywords:", keywords),
                   format_args!("{:<16}{} ({:#})", "Created at:", created_at, created_at),
                   format_args!("{:<16}{} ({:#})", "Updated at:", updated_at, updated_at))
        } else {
            // Summary output: indent each non-empty line of the 5-version
            // history by two spaces.
            let mut versions = String::new();
            for line in self.print_last_versions(5, false).lines() {
                versions = versions + "\n";
                if !line.is_empty() {
                    versions = versions + "  " + line;
                }
            }
            write!(f,
                   "{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}",
                   format_args!("{:<16}{}", "Crate:", self.krate["name"]),
                   format_args!("{:<16}{}", "Version:", self.krate["max_version"]),
                   format_args!("{:<16}{}", "Description:", self.krate["description"]),
                   format_args!("{:<16}{}", "Downloads:", self.krate["downloads"]),
                   format_args!("{:<16}{}", "Homepage:", self.krate["homepage"]),
                   format_args!("{:<16}{}", "Documentation:", self.krate["documentation"]),
                   format_args!("{:<16}{}", "Repository:", self.krate["repository"]),
                   format_args!("{:<16}{:#}", "Last updated:", updated_at),
                   format_args!("{:<16}\n{}", "Version history:", versions))
        }
    }
}
|
use crate::raw::{Bucket, RawDrain, RawIntoIter, RawIter, RawTable};
use crate::CollectionAllocErr;
use core::borrow::Borrow;
use core::fmt::{self, Debug};
use core::hash::{BuildHasher, Hash, Hasher};
use core::iter::{FromIterator, FusedIterator};
use core::marker::PhantomData;
use core::mem;
use core::ops::Index;
/// Default hasher for `HashMap`.
#[cfg(feature = "ahash")]
pub type DefaultHashBuilder = ahash::ABuildHasher;
/// Dummy default hasher for `HashMap`.
///
/// With the "ahash" feature disabled there is no default hash algorithm,
/// so this uninhabited enum merely lets `HashMap<K, V>` name a default `S`
/// type parameter; maps must then be built with an explicit hasher.
#[cfg(not(feature = "ahash"))]
pub enum DefaultHashBuilder {}
/// A hash map implemented with quadratic probing and SIMD lookup.
///
/// The default hashing algorithm is currently [`AHash`], though this is
/// subject to change at any point in the future. This hash function is very
/// fast for all types of keys, but this algorithm will typically *not* protect
/// against attacks such as HashDoS.
///
/// The hashing algorithm can be replaced on a per-`HashMap` basis using the
/// [`default`], [`with_hasher`], and [`with_capacity_and_hasher`] methods. Many
/// alternative algorithms are available on crates.io, such as the [`fnv`] crate.
///
/// It is required that the keys implement the [`Eq`] and [`Hash`] traits, although
/// this can frequently be achieved by using `#[derive(PartialEq, Eq, Hash)]`.
/// If you implement these yourself, it is important that the following
/// property holds:
///
/// ```text
/// k1 == k2 -> hash(k1) == hash(k2)
/// ```
///
/// In other words, if two keys are equal, their hashes must be equal.
///
/// It is a logic error for a key to be modified in such a way that the key's
/// hash, as determined by the [`Hash`] trait, or its equality, as determined by
/// the [`Eq`] trait, changes while it is in the map. This is normally only
/// possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
///
/// It is also a logic error for the [`Hash`] implementation of a key to panic.
/// This is generally only possible if the trait is implemented manually. If a
/// panic does occur then the contents of the `HashMap` may become corrupted and
/// some items may be dropped from the table.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// // Type inference lets us omit an explicit type signature (which
/// // would be `HashMap<String, String>` in this example).
/// let mut book_reviews = HashMap::new();
///
/// // Review some books.
/// book_reviews.insert(
/// "Adventures of Huckleberry Finn".to_string(),
/// "My favorite book.".to_string(),
/// );
/// book_reviews.insert(
/// "Grimms' Fairy Tales".to_string(),
/// "Masterpiece.".to_string(),
/// );
/// book_reviews.insert(
/// "Pride and Prejudice".to_string(),
/// "Very enjoyable.".to_string(),
/// );
/// book_reviews.insert(
/// "The Adventures of Sherlock Holmes".to_string(),
/// "Eye lyked it alot.".to_string(),
/// );
///
/// // Check for a specific one.
/// // When collections store owned values (String), they can still be
/// // queried using references (&str).
/// if !book_reviews.contains_key("Les Misérables") {
/// println!("We've got {} reviews, but Les Misérables ain't one.",
/// book_reviews.len());
/// }
///
/// // oops, this review has a lot of spelling mistakes, let's delete it.
/// book_reviews.remove("The Adventures of Sherlock Holmes");
///
/// // Look up the values associated with some keys.
/// let to_find = ["Pride and Prejudice", "Alice's Adventure in Wonderland"];
/// for &book in &to_find {
/// match book_reviews.get(book) {
/// Some(review) => println!("{}: {}", book, review),
/// None => println!("{} is unreviewed.", book)
/// }
/// }
///
/// // Look up the value for a key (will panic if the key is not found).
/// println!("Review for Jane: {}", book_reviews["Pride and Prejudice"]);
///
/// // Iterate over everything.
/// for (book, review) in &book_reviews {
/// println!("{}: \"{}\"", book, review);
/// }
/// ```
///
/// `HashMap` also implements an [`Entry API`](#method.entry), which allows
/// for more complex methods of getting, setting, updating and removing keys and
/// their values:
///
/// ```
/// use hashbrown::HashMap;
///
/// // type inference lets us omit an explicit type signature (which
/// // would be `HashMap<&str, u8>` in this example).
/// let mut player_stats = HashMap::new();
///
/// fn random_stat_buff() -> u8 {
/// // could actually return some random value here - let's just return
/// // some fixed value for now
/// 42
/// }
///
/// // insert a key only if it doesn't already exist
/// player_stats.entry("health").or_insert(100);
///
/// // insert a key using a function that provides a new value only if it
/// // doesn't already exist
/// player_stats.entry("defence").or_insert_with(random_stat_buff);
///
/// // update a key, guarding against the key possibly not being set
/// let stat = player_stats.entry("attack").or_insert(100);
/// *stat += random_stat_buff();
/// ```
///
/// The easiest way to use `HashMap` with a custom key type is to derive [`Eq`] and [`Hash`].
/// We must also derive [`PartialEq`].
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
/// [`PartialEq`]: https://doc.rust-lang.org/std/cmp/trait.PartialEq.html
/// [`RefCell`]: https://doc.rust-lang.org/std/cell/struct.RefCell.html
/// [`Cell`]: https://doc.rust-lang.org/std/cell/struct.Cell.html
/// [`default`]: #method.default
/// [`with_hasher`]: #method.with_hasher
/// [`with_capacity_and_hasher`]: #method.with_capacity_and_hasher
/// [`fnv`]: https://crates.io/crates/fnv
/// [`AHash`]: https://crates.io/crates/ahash
///
/// ```
/// use hashbrown::HashMap;
///
/// #[derive(Hash, Eq, PartialEq, Debug)]
/// struct Viking {
/// name: String,
/// country: String,
/// }
///
/// impl Viking {
/// /// Creates a new Viking.
/// fn new(name: &str, country: &str) -> Viking {
/// Viking { name: name.to_string(), country: country.to_string() }
/// }
/// }
///
/// // Use a HashMap to store the vikings' health points.
/// let mut vikings = HashMap::new();
///
/// vikings.insert(Viking::new("Einar", "Norway"), 25);
/// vikings.insert(Viking::new("Olaf", "Denmark"), 24);
/// vikings.insert(Viking::new("Harald", "Iceland"), 12);
///
/// // Use derived implementation to print the status of the vikings.
/// for (viking, health) in &vikings {
/// println!("{:?} has {} hp", viking, health);
/// }
/// ```
///
/// A `HashMap` with fixed list of elements can be initialized from an array:
///
/// ```
/// use hashbrown::HashMap;
///
/// fn main() {
/// let timber_resources: HashMap<&str, i32> =
/// [("Norway", 100),
/// ("Denmark", 50),
/// ("Iceland", 10)]
/// .iter().cloned().collect();
/// // use the values stored in map
/// }
/// ```
#[derive(Clone)]
pub struct HashMap<K, V, S = DefaultHashBuilder> {
    // Produces the u64 hash for every key operation (see `make_hash`).
    pub(crate) hash_builder: S,
    // Backing swiss-table storage; keys and values are stored together
    // as `(K, V)` pairs.
    pub(crate) table: RawTable<(K, V)>,
}
/// Hashes `val` with a fresh hasher obtained from `hash_builder`.
///
/// `BuildHasher` guarantees that the same builder produces identical
/// hashes for identical input, which the table relies on when resizing.
#[cfg_attr(feature = "inline-more", inline)]
pub(crate) fn make_hash<K: Hash + ?Sized>(hash_builder: &impl BuildHasher, val: &K) -> u64 {
    let mut hasher = hash_builder.build_hasher();
    Hash::hash(val, &mut hasher);
    Hasher::finish(&hasher)
}
// These convenience constructors only exist when the "ahash" feature
// supplies a real default hasher; without it, `DefaultHashBuilder` is
// uninhabited and maps must be built with an explicit hasher.
#[cfg(feature = "ahash")]
impl<K, V> HashMap<K, V, DefaultHashBuilder> {
    /// Creates an empty `HashMap`.
    ///
    /// The hash map is initially created with a capacity of 0, so it will not allocate until it
    /// is first inserted into.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let mut map: HashMap<&str, i32> = HashMap::new();
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn new() -> Self {
        Self::default()
    }
    /// Creates an empty `HashMap` with the specified capacity.
    ///
    /// The hash map will be able to hold at least `capacity` elements without
    /// reallocating. If `capacity` is 0, the hash map will not allocate.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let mut map: HashMap<&str, i32> = HashMap::with_capacity(10);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn with_capacity(capacity: usize) -> Self {
        Self::with_capacity_and_hasher(capacity, DefaultHashBuilder::default())
    }
}
// Methods that need neither `K: Hash + Eq` nor `S: BuildHasher` — they
// never look keys up, only construct, inspect or iterate the table.
impl<K, V, S> HashMap<K, V, S> {
    /// Creates an empty `HashMap` which will use the given hash builder to hash
    /// keys.
    ///
    /// The created map has the default initial capacity.
    ///
    /// Warning: `hash_builder` is normally randomly generated, and
    /// is designed to allow HashMaps to be resistant to attacks that
    /// cause many collisions and very poor performance. Setting it
    /// manually using this function can expose a DoS attack vector.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::DefaultHashBuilder;
    ///
    /// let s = DefaultHashBuilder::default();
    /// let mut map = HashMap::with_hasher(s);
    /// map.insert(1, 2);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn with_hasher(hash_builder: S) -> Self {
        Self {
            hash_builder,
            table: RawTable::new(),
        }
    }
    /// Creates an empty `HashMap` with the specified capacity, using `hash_builder`
    /// to hash the keys.
    ///
    /// The hash map will be able to hold at least `capacity` elements without
    /// reallocating. If `capacity` is 0, the hash map will not allocate.
    ///
    /// Warning: `hash_builder` is normally randomly generated, and
    /// is designed to allow HashMaps to be resistant to attacks that
    /// cause many collisions and very poor performance. Setting it
    /// manually using this function can expose a DoS attack vector.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::DefaultHashBuilder;
    ///
    /// let s = DefaultHashBuilder::default();
    /// let mut map = HashMap::with_capacity_and_hasher(10, s);
    /// map.insert(1, 2);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Self {
        Self {
            hash_builder,
            table: RawTable::with_capacity(capacity),
        }
    }
    /// Returns a reference to the map's [`BuildHasher`].
    ///
    /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::DefaultHashBuilder;
    ///
    /// let hasher = DefaultHashBuilder::default();
    /// let map: HashMap<i32, i32> = HashMap::with_hasher(hasher);
    /// let hasher: &DefaultHashBuilder = map.hasher();
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn hasher(&self) -> &S {
        &self.hash_builder
    }
    /// Returns the number of elements the map can hold without reallocating.
    ///
    /// This number is a lower bound; the `HashMap<K, V>` might be able to hold
    /// more, but is guaranteed to be able to hold at least this many.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let map: HashMap<i32, i32> = HashMap::with_capacity(100);
    /// assert!(map.capacity() >= 100);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn capacity(&self) -> usize {
        self.table.capacity()
    }
    /// An iterator visiting all keys in arbitrary order.
    /// The iterator element type is `&'a K`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// for key in map.keys() {
    ///     println!("{}", key);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn keys(&self) -> Keys<'_, K, V> {
        // Thin projection over `iter()`; borrows `self` the same way.
        Keys { inner: self.iter() }
    }
    /// An iterator visiting all values in arbitrary order.
    /// The iterator element type is `&'a V`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// for val in map.values() {
    ///     println!("{}", val);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn values(&self) -> Values<'_, K, V> {
        // Thin projection over `iter()`; borrows `self` the same way.
        Values { inner: self.iter() }
    }
    /// An iterator visiting all values mutably in arbitrary order.
    /// The iterator element type is `&'a mut V`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    ///
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// for val in map.values_mut() {
    ///     *val = *val + 10;
    /// }
    ///
    /// for val in map.values() {
    ///     println!("{}", val);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn values_mut(&mut self) -> ValuesMut<'_, K, V> {
        ValuesMut {
            inner: self.iter_mut(),
        }
    }
    /// An iterator visiting all key-value pairs in arbitrary order.
    /// The iterator element type is `(&'a K, &'a V)`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// for (key, val) in map.iter() {
    ///     println!("key: {} val: {}", key, val);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn iter(&self) -> Iter<'_, K, V> {
        // Here we tie the lifetime of self to the iter.
        // SAFETY: the raw iterator does not carry a lifetime; the
        // `PhantomData` marker plus `Iter<'_, ...>`'s lifetime re-attach
        // the `&self` borrow, so the table cannot be mutated or freed
        // while the iterator is alive.
        unsafe {
            Iter {
                inner: self.table.iter(),
                marker: PhantomData,
            }
        }
    }
    /// An iterator visiting all key-value pairs in arbitrary order,
    /// with mutable references to the values.
    /// The iterator element type is `(&'a K, &'a mut V)`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// // Update all values
    /// for (_, val) in map.iter_mut() {
    ///     *val *= 2;
    /// }
    ///
    /// for (key, val) in &map {
    ///     println!("key: {} val: {}", key, val);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
        // Here we tie the lifetime of self to the iter.
        // SAFETY: same as `iter`, but re-attaching the exclusive `&mut self`
        // borrow, so no other access can alias the yielded `&mut V`s.
        unsafe {
            IterMut {
                inner: self.table.iter(),
                marker: PhantomData,
            }
        }
    }
    // Test-only: number of raw buckets, which exceeds `capacity()` by the
    // load-factor headroom.
    #[cfg(test)]
    #[cfg_attr(feature = "inline-more", inline)]
    fn raw_capacity(&self) -> usize {
        self.table.buckets()
    }
    /// Returns the number of elements in the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut a = HashMap::new();
    /// assert_eq!(a.len(), 0);
    /// a.insert(1, "a");
    /// assert_eq!(a.len(), 1);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn len(&self) -> usize {
        self.table.len()
    }
    /// Returns `true` if the map contains no elements.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut a = HashMap::new();
    /// assert!(a.is_empty());
    /// a.insert(1, "a");
    /// assert!(!a.is_empty());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Clears the map, returning all key-value pairs as an iterator. Keeps the
    /// allocated memory for reuse.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut a = HashMap::new();
    /// a.insert(1, "a");
    /// a.insert(2, "b");
    ///
    /// for (k, v) in a.drain().take(1) {
    ///     assert!(k == 1 || k == 2);
    ///     assert!(v == "a" || v == "b");
    /// }
    ///
    /// assert!(a.is_empty());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn drain(&mut self) -> Drain<'_, K, V> {
        // Here we tie the lifetime of self to the iter.
        // SAFETY: `Drain<'_>`'s lifetime re-attaches the exclusive borrow
        // of `self`, so the table cannot be used until draining finishes.
        unsafe {
            Drain {
                inner: self.table.drain(),
            }
        }
    }
    /// Clears the map, removing all key-value pairs. Keeps the allocated memory
    /// for reuse.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut a = HashMap::new();
    /// a.insert(1, "a");
    /// a.clear();
    /// assert!(a.is_empty());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn clear(&mut self) {
        self.table.clear();
    }
}
// Methods that hash and compare keys, hence the `K: Eq + Hash` and
// `S: BuildHasher` bounds.
impl<K, V, S> HashMap<K, V, S>
where
    K: Eq + Hash,
    S: BuildHasher,
{
    /// Reserves capacity for at least `additional` more elements to be inserted
    /// in the `HashMap`. The collection may reserve more space to avoid
    /// frequent reallocations.
    ///
    /// # Panics
    ///
    /// Panics if the new allocation size overflows [`usize`].
    ///
    /// [`usize`]: https://doc.rust-lang.org/std/primitive.usize.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let mut map: HashMap<&str, i32> = HashMap::new();
    /// map.reserve(10);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn reserve(&mut self, additional: usize) {
        // The closure lets the raw table rehash existing entries if it
        // has to move them into a bigger allocation.
        let hash_builder = &self.hash_builder;
        self.table
            .reserve(additional, |x| make_hash(hash_builder, &x.0));
    }
    /// Tries to reserve capacity for at least `additional` more elements to be inserted
    /// in the given `HashMap<K,V>`. The collection may reserve more space to avoid
    /// frequent reallocations.
    ///
    /// # Errors
    ///
    /// If the capacity overflows, or the allocator reports a failure, then an error
    /// is returned.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let mut map: HashMap<&str, isize> = HashMap::new();
    /// map.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
        let hash_builder = &self.hash_builder;
        self.table
            .try_reserve(additional, |x| make_hash(hash_builder, &x.0))
    }
    /// Shrinks the capacity of the map as much as possible. It will drop
    /// down as much as possible while maintaining the internal rules
    /// and possibly leaving some space in accordance with the resize policy.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
    /// map.insert(1, 2);
    /// map.insert(3, 4);
    /// assert!(map.capacity() >= 100);
    /// map.shrink_to_fit();
    /// assert!(map.capacity() >= 2);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn shrink_to_fit(&mut self) {
        // Shrink-to-fit is just shrink_to with a zero lower bound.
        let hash_builder = &self.hash_builder;
        self.table.shrink_to(0, |x| make_hash(hash_builder, &x.0));
    }
    /// Shrinks the capacity of the map with a lower limit. It will drop
    /// down no lower than the supplied limit while maintaining the internal rules
    /// and possibly leaving some space in accordance with the resize policy.
    ///
    /// This function does nothing if the current capacity is smaller than the
    /// supplied minimum capacity.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
    /// map.insert(1, 2);
    /// map.insert(3, 4);
    /// assert!(map.capacity() >= 100);
    /// map.shrink_to(10);
    /// assert!(map.capacity() >= 10);
    /// map.shrink_to(0);
    /// assert!(map.capacity() >= 2);
    /// map.shrink_to(10);
    /// assert!(map.capacity() >= 2);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn shrink_to(&mut self, min_capacity: usize) {
        let hash_builder = &self.hash_builder;
        self.table
            .shrink_to(min_capacity, |x| make_hash(hash_builder, &x.0));
    }
    /// Gets the given key's corresponding entry in the map for in-place manipulation.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut letters = HashMap::new();
    ///
    /// for ch in "a short treatise on fungi".chars() {
    ///     let counter = letters.entry(ch).or_insert(0);
    ///     *counter += 1;
    /// }
    ///
    /// assert_eq!(letters[&'s'], 2);
    /// assert_eq!(letters[&'t'], 3);
    /// assert_eq!(letters[&'u'], 1);
    /// assert_eq!(letters.get(&'y'), None);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S> {
        // Hash once; the result is carried into the vacant entry so an
        // eventual insert does not rehash.
        let hash = make_hash(&self.hash_builder, &key);
        if let Some(elem) = self.table.find(hash, |q| q.0.eq(&key)) {
            Entry::Occupied(OccupiedEntry {
                key: Some(key),
                elem,
                table: self,
            })
        } else {
            Entry::Vacant(VacantEntry {
                hash,
                key,
                table: self,
            })
        }
    }
    /// Returns a reference to the value corresponding to the key.
    ///
    /// The key may be any borrowed form of the map's key type, but
    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.get(&1), Some(&"a"));
    /// assert_eq!(map.get(&2), None);
    /// ```
    #[inline]
    pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        self.get_key_value(k).map(|(_, v)| v)
    }
    /// Returns the key-value pair corresponding to the supplied key.
    ///
    /// The supplied key may be any borrowed form of the map's key type, but
    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
    /// assert_eq!(map.get_key_value(&2), None);
    /// ```
    #[inline]
    pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        let hash = make_hash(&self.hash_builder, k);
        self.table
            .find(hash, |x| k.eq(x.0.borrow()))
            // SAFETY: the bucket was just found in this table and the
            // shared borrow of `self` keeps it valid; the returned
            // references inherit that borrow's lifetime.
            .map(|item| unsafe {
                let &(ref key, ref value) = item.as_ref();
                (key, value)
            })
    }
    /// Returns `true` if the map contains a value for the specified key.
    ///
    /// The key may be any borrowed form of the map's key type, but
    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.contains_key(&1), true);
    /// assert_eq!(map.contains_key(&2), false);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn contains_key<Q: ?Sized>(&self, k: &Q) -> bool
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        self.get(k).is_some()
    }
    /// Returns a mutable reference to the value corresponding to the key.
    ///
    /// The key may be any borrowed form of the map's key type, but
    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert(1, "a");
    /// if let Some(x) = map.get_mut(&1) {
    ///     *x = "b";
    /// }
    /// assert_eq!(map[&1], "b");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        let hash = make_hash(&self.hash_builder, k);
        self.table
            .find(hash, |x| k.eq(x.0.borrow()))
            // SAFETY: bucket found in this table; the exclusive borrow of
            // `self` guarantees the yielded `&mut V` cannot alias.
            .map(|item| unsafe { &mut item.as_mut().1 })
    }
    /// Inserts a key-value pair into the map.
    ///
    /// If the map did not have this key present, [`None`] is returned.
    ///
    /// If the map did have this key present, the value is updated, and the old
    /// value is returned. The key is not updated, though; this matters for
    /// types that can be `==` without being identical. See the [module-level
    /// documentation] for more.
    ///
    /// [`None`]: https://doc.rust-lang.org/std/option/enum.Option.html#variant.None
    /// [module-level documentation]: index.html#insert-and-complex-keys
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// assert_eq!(map.insert(37, "a"), None);
    /// assert_eq!(map.is_empty(), false);
    ///
    /// map.insert(37, "b");
    /// assert_eq!(map.insert(37, "c"), Some("b"));
    /// assert_eq!(map[&37], "c");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(&mut self, k: K, v: V) -> Option<V> {
        // SAFETY: the bucket returned by `find` points into our own table
        // and stays valid for the duration of this exclusive borrow.
        unsafe {
            let hash = make_hash(&self.hash_builder, &k);
            if let Some(item) = self.table.find(hash, |x| k.eq(&x.0)) {
                // Existing key: swap the value in place, keep the old key.
                Some(mem::replace(&mut item.as_mut().1, v))
            } else {
                // New key: the closure lets the table rehash entries if
                // the insert triggers a resize.
                let hash_builder = &self.hash_builder;
                self.table
                    .insert(hash, (k, v), |x| make_hash(hash_builder, &x.0));
                None
            }
        }
    }
    /// Removes a key from the map, returning the value at the key if the key
    /// was previously in the map.
    ///
    /// The key may be any borrowed form of the map's key type, but
    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.remove(&1), Some("a"));
    /// assert_eq!(map.remove(&1), None);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        self.remove_entry(k).map(|(_, v)| v)
    }
    /// Removes a key from the map, returning the stored key and value if the
    /// key was previously in the map.
    ///
    /// The key may be any borrowed form of the map's key type, but
    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// # fn main() {
    /// let mut map = HashMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.remove_entry(&1), Some((1, "a")));
    /// assert_eq!(map.remove(&1), None);
    /// # }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove_entry<Q: ?Sized>(&mut self, k: &Q) -> Option<(K, V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        // SAFETY: `item` was just found in this table. Per its name,
        // `erase_no_drop` unlinks the entry without dropping it, after
        // which `read` moves the (K, V) pair out by value exactly once.
        unsafe {
            let hash = make_hash(&self.hash_builder, &k);
            if let Some(item) = self.table.find(hash, |x| k.eq(x.0.borrow())) {
                self.table.erase_no_drop(&item);
                Some(item.read())
            } else {
                None
            }
        }
    }
    /// Retains only the elements specified by the predicate.
    ///
    /// In other words, remove all pairs `(k, v)` such that `f(&k,&mut v)` returns `false`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<i32, i32> = (0..8).map(|x|(x, x*10)).collect();
    /// map.retain(|&k, _| k % 2 == 0);
    /// assert_eq!(map.len(), 4);
    /// ```
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&K, &mut V) -> bool,
    {
        // Here we only use `iter` as a temporary, preventing use-after-free
        // SAFETY: the raw iterator is consumed within this exclusive
        // borrow; erasing the current item does not invalidate iteration
        // over the remaining buckets.
        unsafe {
            for item in self.table.iter() {
                let &mut (ref key, ref mut value) = item.as_mut();
                if !f(key, value) {
                    // Erase the element from the table first since drop might panic.
                    self.table.erase_no_drop(&item);
                    item.drop();
                }
            }
        }
    }
}
impl<K, V, S> HashMap<K, V, S> {
    /// Creates a raw entry builder for the HashMap.
    ///
    /// Raw entries provide the lowest level of control for searching and
    /// manipulating a map. They must be manually initialized with a hash and
    /// then manually searched. After this, insertions into a vacant entry
    /// still require an owned key to be provided.
    ///
    /// Raw entries are useful for such exotic situations as:
    ///
    /// * Hash memoization
    /// * Deferring the creation of an owned key until it is known to be required
    /// * Using a search key that doesn't work with the Borrow trait
    /// * Using custom comparison logic without newtype wrappers
    ///
    /// Because raw entries provide much more low-level control, it's much easier
    /// to put the HashMap into an inconsistent state which, while memory-safe,
    /// will cause the map to produce seemingly random results. Higher-level and
    /// more foolproof APIs like `entry` should be preferred when possible.
    ///
    /// In particular, the hash used to initialize the raw entry must still be
    /// consistent with the hash of the key that is ultimately stored in the entry.
    /// This is because implementations of HashMap may need to recompute hashes
    /// when resizing, at which point only the keys are available.
    ///
    /// Raw entries give mutable access to the keys. This must not be used
    /// to modify how the key would compare or hash, as the map will not re-evaluate
    /// where the key should go, meaning the keys may become "lost" if their
    /// location does not reflect their state. For instance, if you change a key
    /// so that the map now contains keys which compare equal, search may start
    /// acting erratically, with two keys randomly masking each other. Implementations
    /// are free to assume this doesn't happen (within the limits of memory-safety).
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<'_, K, V, S> {
        // The builder simply captures a mutable borrow of the whole map.
        RawEntryBuilderMut { map: self }
    }
    /// Creates a raw immutable entry builder for the HashMap.
    ///
    /// Raw entries provide the lowest level of control for searching and
    /// manipulating a map. They must be manually initialized with a hash and
    /// then manually searched.
    ///
    /// This is useful for
    /// * Hash memoization
    /// * Using a search key that doesn't work with the Borrow trait
    /// * Using custom comparison logic without newtype wrappers
    ///
    /// Unless you are in such a situation, higher-level and more foolproof APIs like
    /// `get` should be preferred.
    ///
    /// Immutable raw entries have very limited use; you might instead want `raw_entry_mut`.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S> {
        // Shared-borrow counterpart of `raw_entry_mut`.
        RawEntryBuilder { map: self }
    }
}
impl<K, V, S> PartialEq for HashMap<K, V, S>
where
    K: Eq + Hash,
    V: PartialEq,
    S: BuildHasher,
{
    /// Two maps are equal when they have the same length and every key of
    /// `self` maps to an equal value in `other`.
    fn eq(&self, other: &Self) -> bool {
        // The length check short-circuits the (more expensive) per-entry scan.
        self.len() == other.len()
            && self
                .iter()
                .all(|(key, value)| other.get(key).map_or(false, |v| *value == *v))
    }
}
impl<K, V, S> Eq for HashMap<K, V, S>
where
    K: Eq + Hash,
    V: Eq,
    S: BuildHasher,
{
}
impl<K, V, S> Debug for HashMap<K, V, S>
where
    K: Debug,
    V: Debug,
{
    /// Formats the map as `{key: value, ...}` using the standard map builder.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut builder = f.debug_map();
        builder.entries(self.iter());
        builder.finish()
    }
}
impl<K, V, S> Default for HashMap<K, V, S>
where
    S: Default,
{
    /// Creates an empty `HashMap<K, V, S>`, with the `Default` value for the hasher.
    #[cfg_attr(feature = "inline-more", inline)]
    fn default() -> Self {
        Self::with_hasher(S::default())
    }
}
impl<K, Q: ?Sized, V, S> Index<&Q> for HashMap<K, V, S>
where
    K: Eq + Hash + Borrow<Q>,
    Q: Eq + Hash,
    S: BuildHasher,
{
    type Output = V;
    /// Returns a reference to the value corresponding to the supplied key.
    ///
    /// # Panics
    ///
    /// Panics if the key is not present in the `HashMap`.
    #[cfg_attr(feature = "inline-more", inline)]
    fn index(&self, key: &Q) -> &V {
        // Delegates to `get`; a missing key panics with the message below.
        self.get(key).expect("no entry found for key")
    }
}
/// An iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`iter`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`iter`]: struct.HashMap.html#method.iter
/// [`HashMap`]: struct.HashMap.html
pub struct Iter<'a, K, V> {
    inner: RawIter<(K, V)>,
    marker: PhantomData<(&'a K, &'a V)>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
impl<K, V> Clone for Iter<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn clone(&self) -> Self {
        // Cloning only copies the raw cursor; the underlying table is shared.
        Self {
            inner: self.inner.clone(),
            marker: PhantomData,
        }
    }
}
impl<K: Debug, V: Debug> fmt::Debug for Iter<'_, K, V> {
    /// Debug-prints the remaining entries without consuming the iterator.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.clone()).finish()
    }
}
/// A mutable iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`iter_mut`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`iter_mut`]: struct.HashMap.html#method.iter_mut
/// [`HashMap`]: struct.HashMap.html
pub struct IterMut<'a, K, V> {
    inner: RawIter<(K, V)>,
    // To ensure invariance with respect to V
    marker: PhantomData<(&'a K, &'a mut V)>,
}
// We override the default Send impl which has K: Sync instead of K: Send. Both
// are correct, but this one is more general since it allows keys which
// implement Send but not Sync.
unsafe impl<K: Send, V: Send> Send for IterMut<'_, K, V> {}
impl<K, V> IterMut<'_, K, V> {
    /// Returns an iterator of references over the remaining items.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn iter(&self) -> Iter<'_, K, V> {
        // Clones the raw cursor into a shared-reference view; used by Debug.
        Iter {
            inner: self.inner.clone(),
            marker: PhantomData,
        }
    }
}
/// An owning iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`into_iter`] method on [`HashMap`][`HashMap`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
/// [`into_iter`]: struct.HashMap.html#method.into_iter
/// [`HashMap`]: struct.HashMap.html
pub struct IntoIter<K, V> {
    inner: RawIntoIter<(K, V)>,
}
impl<K, V> IntoIter<K, V> {
    /// Returns an iterator of references over the remaining items.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn iter(&self) -> Iter<'_, K, V> {
        // Borrowing view of the not-yet-consumed items; used by Debug.
        Iter {
            inner: self.inner.iter(),
            marker: PhantomData,
        }
    }
}
/// An iterator over the keys of a `HashMap`.
///
/// This `struct` is created by the [`keys`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`keys`]: struct.HashMap.html#method.keys
/// [`HashMap`]: struct.HashMap.html
pub struct Keys<'a, K, V> {
    inner: Iter<'a, K, V>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
impl<K, V> Clone for Keys<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn clone(&self) -> Self {
        // Delegates to the cheap clone of the underlying entry iterator.
        Self {
            inner: self.inner.clone(),
        }
    }
}
impl<K: Debug, V> fmt::Debug for Keys<'_, K, V> {
    /// Debug-prints the remaining keys without consuming the iterator.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.clone()).finish()
    }
}
/// An iterator over the values of a `HashMap`.
///
/// This `struct` is created by the [`values`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`values`]: struct.HashMap.html#method.values
/// [`HashMap`]: struct.HashMap.html
pub struct Values<'a, K, V> {
    inner: Iter<'a, K, V>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
impl<K, V> Clone for Values<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn clone(&self) -> Self {
        // Delegates to the cheap clone of the underlying entry iterator.
        Self {
            inner: self.inner.clone(),
        }
    }
}
impl<K, V: Debug> fmt::Debug for Values<'_, K, V> {
    /// Debug-prints the remaining values without consuming the iterator.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.clone()).finish()
    }
}
/// A draining iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`drain`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`drain`]: struct.HashMap.html#method.drain
/// [`HashMap`]: struct.HashMap.html
pub struct Drain<'a, K, V> {
    inner: RawDrain<'a, (K, V)>,
}
impl<K, V> Drain<'_, K, V> {
    /// Returns an iterator of references over the remaining items.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn iter(&self) -> Iter<'_, K, V> {
        // Borrowing view of the not-yet-drained items; used by Debug.
        Iter {
            inner: self.inner.iter(),
            marker: PhantomData,
        }
    }
}
/// A mutable iterator over the values of a `HashMap`.
///
/// This `struct` is created by the [`values_mut`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`values_mut`]: struct.HashMap.html#method.values_mut
/// [`HashMap`]: struct.HashMap.html
pub struct ValuesMut<'a, K, V> {
    // Wraps the full entry iterator; `next` projects out the value half.
    inner: IterMut<'a, K, V>,
}
/// A builder for computing where in a [`HashMap`] a key-value pair would be stored.
///
/// See the [`HashMap::raw_entry_mut`] docs for usage examples.
///
/// [`HashMap::raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
pub struct RawEntryBuilderMut<'a, K, V, S> {
    // Exclusive borrow of the map for the lifetime of the raw-entry search.
    map: &'a mut HashMap<K, V, S>,
}
/// A view into a single entry in a map, which may either be vacant or occupied.
///
/// This is a lower-level version of [`Entry`].
///
/// This `enum` is constructed through the [`raw_entry_mut`] method on [`HashMap`],
/// then calling one of the methods of that [`RawEntryBuilderMut`].
///
/// [`HashMap`]: struct.HashMap.html
/// [`Entry`]: enum.Entry.html
/// [`raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
/// [`RawEntryBuilderMut`]: struct.RawEntryBuilderMut.html
pub enum RawEntryMut<'a, K, V, S> {
    /// An occupied entry.
    Occupied(RawOccupiedEntryMut<'a, K, V>),
    /// A vacant entry.
    Vacant(RawVacantEntryMut<'a, K, V, S>),
}
/// A view into an occupied entry in a `HashMap`.
/// It is part of the [`RawEntryMut`] enum.
///
/// [`RawEntryMut`]: enum.RawEntryMut.html
pub struct RawOccupiedEntryMut<'a, K, V> {
    // The bucket found by the search; `table` keeps the map borrowed so the
    // bucket cannot be invalidated while this view exists.
    elem: Bucket<(K, V)>,
    table: &'a mut RawTable<(K, V)>,
}
// SAFETY: the view owns exclusive access to the (K, V) slot for 'a, so it is
// Send/Sync exactly when the pair itself is.
unsafe impl<K, V> Send for RawOccupiedEntryMut<'_, K, V>
where
    K: Send,
    V: Send,
{
}
unsafe impl<K, V> Sync for RawOccupiedEntryMut<'_, K, V>
where
    K: Sync,
    V: Sync,
{
}
/// A view into a vacant entry in a `HashMap`.
/// It is part of the [`RawEntryMut`] enum.
///
/// [`RawEntryMut`]: enum.RawEntryMut.html
pub struct RawVacantEntryMut<'a, K, V, S> {
    table: &'a mut RawTable<(K, V)>,
    // Kept so `insert` can hash the caller-supplied key consistently.
    hash_builder: &'a S,
}
/// A builder for computing where in a [`HashMap`] a key-value pair would be stored.
///
/// See the [`HashMap::raw_entry`] docs for usage examples.
///
/// [`HashMap::raw_entry`]: struct.HashMap.html#method.raw_entry
pub struct RawEntryBuilder<'a, K, V, S> {
    // Shared borrow: this builder can only look up, never insert.
    map: &'a HashMap<K, V, S>,
}
impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> {
/// Creates a `RawEntryMut` from the given key.
#[cfg_attr(feature = "inline-more", inline)]
#[allow(clippy::wrong_self_convention)]
pub fn from_key<Q: ?Sized>(self, k: &Q) -> RawEntryMut<'a, K, V, S>
where
S: BuildHasher,
K: Borrow<Q>,
Q: Hash + Eq,
{
let mut hasher = self.map.hash_builder.build_hasher();
k.hash(&mut hasher);
self.from_key_hashed_nocheck(hasher.finish(), k)
}
/// Creates a `RawEntryMut` from the given key and its hash.
#[inline]
#[allow(clippy::wrong_self_convention)]
pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S>
where
K: Borrow<Q>,
Q: Eq,
{
self.from_hash(hash, |q| q.borrow().eq(k))
}
}
impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> {
    /// Creates a `RawEntryMut` from the given hash.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_hash<F>(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S>
    where
        for<'b> F: FnMut(&'b K) -> bool,
    {
        self.search(hash, is_match)
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn search<F>(self, hash: u64, mut is_match: F) -> RawEntryMut<'a, K, V, S>
    where
        for<'b> F: FnMut(&'b K) -> bool,
    {
        // Probe the table: a hit becomes an occupied view of that bucket,
        // a miss becomes a vacant view that remembers the hash builder so a
        // later insert can hash the new key consistently.
        if let Some(elem) = self.map.table.find(hash, |(k, _)| is_match(k)) {
            RawEntryMut::Occupied(RawOccupiedEntryMut {
                elem,
                table: &mut self.map.table,
            })
        } else {
            RawEntryMut::Vacant(RawVacantEntryMut {
                table: &mut self.map.table,
                hash_builder: &self.map.hash_builder,
            })
        }
    }
}
impl<'a, K, V, S> RawEntryBuilder<'a, K, V, S> {
/// Access an entry by key.
#[cfg_attr(feature = "inline-more", inline)]
#[allow(clippy::wrong_self_convention)]
pub fn from_key<Q: ?Sized>(self, k: &Q) -> Option<(&'a K, &'a V)>
where
S: BuildHasher,
K: Borrow<Q>,
Q: Hash + Eq,
{
let mut hasher = self.map.hash_builder.build_hasher();
k.hash(&mut hasher);
self.from_key_hashed_nocheck(hasher.finish(), k)
}
/// Access an entry by a key and its hash.
#[cfg_attr(feature = "inline-more", inline)]
#[allow(clippy::wrong_self_convention)]
pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> Option<(&'a K, &'a V)>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
self.from_hash(hash, |q| q.borrow().eq(k))
}
#[cfg_attr(feature = "inline-more", inline)]
fn search<F>(self, hash: u64, mut is_match: F) -> Option<(&'a K, &'a V)>
where
F: FnMut(&K) -> bool,
{
self.map
.table
.find(hash, |(k, _)| is_match(k))
.map(|item| unsafe {
let &(ref key, ref value) = item.as_ref();
(key, value)
})
}
/// Access an entry by hash.
#[cfg_attr(feature = "inline-more", inline)]
#[allow(clippy::wrong_self_convention)]
pub fn from_hash<F>(self, hash: u64, is_match: F) -> Option<(&'a K, &'a V)>
where
F: FnMut(&K) -> bool,
{
self.search(hash, is_match)
}
}
impl<'a, K, V, S> RawEntryMut<'a, K, V, S> {
    /// Sets the value of the entry, and returns a RawOccupiedEntryMut.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// let entry = map.raw_entry_mut().from_key("horseyland").insert("horseyland", 37);
    ///
    /// assert_eq!(entry.remove_entry(), ("horseyland", 37));
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V>
    where
        K: Hash,
        S: BuildHasher,
    {
        // Occupied: overwrite the value in place (key unchanged).
        // Vacant: insert the pair and return the new occupied view.
        match self {
            RawEntryMut::Occupied(mut entry) => {
                entry.insert(value);
                entry
            }
            RawEntryMut::Vacant(entry) => entry.insert_entry(key, value),
        }
    }
    /// Ensures a value is in the entry by inserting the default if empty, and returns
    /// mutable references to the key and value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 3);
    /// assert_eq!(map["poneyland"], 3);
    ///
    /// *map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 10).1 *= 2;
    /// assert_eq!(map["poneyland"], 6);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert(self, default_key: K, default_val: V) -> (&'a mut K, &'a mut V)
    where
        K: Hash,
        S: BuildHasher,
    {
        // Note: the defaults are already evaluated by the caller; use
        // `or_insert_with` to defer construction until actually needed.
        match self {
            RawEntryMut::Occupied(entry) => entry.into_key_value(),
            RawEntryMut::Vacant(entry) => entry.insert(default_key, default_val),
        }
    }
    /// Ensures a value is in the entry by inserting the result of the default function if empty,
    /// and returns mutable references to the key and value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, String> = HashMap::new();
    ///
    /// map.raw_entry_mut().from_key("poneyland").or_insert_with(|| {
    ///     ("poneyland", "hoho".to_string())
    /// });
    ///
    /// assert_eq!(map["poneyland"], "hoho".to_string());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert_with<F>(self, default: F) -> (&'a mut K, &'a mut V)
    where
        F: FnOnce() -> (K, V),
        K: Hash,
        S: BuildHasher,
    {
        match self {
            RawEntryMut::Occupied(entry) => entry.into_key_value(),
            RawEntryMut::Vacant(entry) => {
                // `default` runs only on the vacant path.
                let (k, v) = default();
                entry.insert(k, v)
            }
        }
    }
    /// Provides in-place mutable access to an occupied entry before any
    /// potential inserts into the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// map.raw_entry_mut()
    ///    .from_key("poneyland")
    ///    .and_modify(|_k, v| { *v += 1 })
    ///    .or_insert("poneyland", 42);
    /// assert_eq!(map["poneyland"], 42);
    ///
    /// map.raw_entry_mut()
    ///    .from_key("poneyland")
    ///    .and_modify(|_k, v| { *v += 1 })
    ///    .or_insert("poneyland", 0);
    /// assert_eq!(map["poneyland"], 43);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn and_modify<F>(self, f: F) -> Self
    where
        F: FnOnce(&mut K, &mut V),
    {
        match self {
            RawEntryMut::Occupied(mut entry) => {
                // Inner block scopes the borrow of `entry` so it can be
                // moved back into the returned variant afterwards.
                {
                    let (k, v) = entry.get_key_value_mut();
                    f(k, v);
                }
                RawEntryMut::Occupied(entry)
            }
            RawEntryMut::Vacant(entry) => RawEntryMut::Vacant(entry),
        }
    }
}
impl<'a, K, V> RawOccupiedEntryMut<'a, K, V> {
    /// Gets a reference to the key in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K {
        unsafe { &self.elem.as_ref().0 }
    }
    /// Gets a mutable reference to the key in the entry.
    ///
    /// Mutating the key must not change its hash or equality; see the
    /// `raw_entry_mut` documentation for the consequences.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key_mut(&mut self) -> &mut K {
        unsafe { &mut self.elem.as_mut().0 }
    }
    /// Converts the entry into a mutable reference to the key in the entry
    /// with a lifetime bound to the map itself.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_key(self) -> &'a mut K {
        unsafe { &mut self.elem.as_mut().0 }
    }
    /// Gets a reference to the value in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get(&self) -> &V {
        unsafe { &self.elem.as_ref().1 }
    }
    /// Converts the OccupiedEntry into a mutable reference to the value in the entry
    /// with a lifetime bound to the map itself.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_mut(self) -> &'a mut V {
        unsafe { &mut self.elem.as_mut().1 }
    }
    /// Gets a mutable reference to the value in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_mut(&mut self) -> &mut V {
        unsafe { &mut self.elem.as_mut().1 }
    }
    /// Gets a reference to the key and value in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_key_value(&mut self) -> (&K, &V) {
        unsafe {
            let &(ref key, ref value) = self.elem.as_ref();
            (key, value)
        }
    }
    /// Gets a mutable reference to the key and value in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_key_value_mut(&mut self) -> (&mut K, &mut V) {
        unsafe {
            let &mut (ref mut key, ref mut value) = self.elem.as_mut();
            (key, value)
        }
    }
    /// Converts the OccupiedEntry into a mutable reference to the key and value in the entry
    /// with a lifetime bound to the map itself.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_key_value(self) -> (&'a mut K, &'a mut V) {
        unsafe {
            let &mut (ref mut key, ref mut value) = self.elem.as_mut();
            (key, value)
        }
    }
    /// Sets the value of the entry, and returns the entry's old value.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(&mut self, value: V) -> V {
        mem::replace(self.get_mut(), value)
    }
    /// Sets the value of the entry, and returns the entry's old value.
    ///
    /// The replacement key must hash and compare identically to the old one;
    /// the bucket is not relocated.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert_key(&mut self, key: K) -> K {
        mem::replace(self.key_mut(), key)
    }
    /// Takes the value out of the entry, and returns it.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove(self) -> V {
        self.remove_entry().1
    }
    /// Take the ownership of the key and value from the map.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove_entry(self) -> (K, V) {
        unsafe {
            // Unlink the slot without dropping, then move the pair out.
            self.table.erase_no_drop(&self.elem);
            self.elem.read()
        }
    }
}
impl<'a, K, V, S> RawVacantEntryMut<'a, K, V, S> {
    /// Sets the value of the entry with the VacantEntry's key,
    /// and returns a mutable reference to it.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, key: K, value: V) -> (&'a mut K, &'a mut V)
    where
        K: Hash,
        S: BuildHasher,
    {
        // Hash the key with the map's hasher, then defer to the
        // hash-taking variant.
        let mut hasher = self.hash_builder.build_hasher();
        key.hash(&mut hasher);
        self.insert_hashed_nocheck(hasher.finish(), key, value)
    }
    /// Sets the value of the entry with the VacantEntry's key,
    /// and returns a mutable reference to it.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::shadow_unrelated)]
    pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> (&'a mut K, &'a mut V)
    where
        K: Hash,
        S: BuildHasher,
    {
        // Copy the builder reference out before `self` is consumed; the
        // closure lets the table re-hash existing keys during a resize.
        let hash_builder = self.hash_builder;
        self.insert_with_hasher(hash, key, value, |k| make_hash(hash_builder, k))
    }
    /// Set the value of an entry with a custom hasher function.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert_with_hasher<H>(
        self,
        hash: u64,
        key: K,
        value: V,
        hasher: H,
    ) -> (&'a mut K, &'a mut V)
    where
        H: Fn(&K) -> u64,
    {
        unsafe {
            let elem = self.table.insert(hash, (key, value), |x| hasher(&x.0));
            let &mut (ref mut k, ref mut v) = elem.as_mut();
            (k, v)
        }
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn insert_entry(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V>
    where
        K: Hash,
        S: BuildHasher,
    {
        let hash_builder = self.hash_builder;
        let mut hasher = self.hash_builder.build_hasher();
        key.hash(&mut hasher);
        // Insert and keep the bucket so the caller gets an occupied view.
        let elem = self.table.insert(hasher.finish(), (key, value), |k| {
            make_hash(hash_builder, &k.0)
        });
        RawOccupiedEntryMut {
            elem,
            table: self.table,
        }
    }
}
impl<K, V, S> Debug for RawEntryBuilderMut<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // NOTE(review): prints "RawEntryBuilder" (without the "Mut" suffix);
        // this mirrors upstream output but is worth confirming — changing
        // the string would alter Debug output.
        f.debug_struct("RawEntryBuilder").finish()
    }
}
impl<K: Debug, V: Debug, S> Debug for RawEntryMut<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Both variants print under the single tuple name "RawEntry".
        match *self {
            RawEntryMut::Vacant(ref v) => f.debug_tuple("RawEntry").field(v).finish(),
            RawEntryMut::Occupied(ref o) => f.debug_tuple("RawEntry").field(o).finish(),
        }
    }
}
impl<K: Debug, V: Debug> Debug for RawOccupiedEntryMut<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RawOccupiedEntryMut")
            .field("key", self.key())
            .field("value", self.get())
            .finish()
    }
}
impl<K, V, S> Debug for RawVacantEntryMut<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RawVacantEntryMut").finish()
    }
}
impl<K, V, S> Debug for RawEntryBuilder<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RawEntryBuilder").finish()
    }
}
/// A view into a single entry in a map, which may either be vacant or occupied.
///
/// This `enum` is constructed from the [`entry`] method on [`HashMap`].
///
/// [`HashMap`]: struct.HashMap.html
/// [`entry`]: struct.HashMap.html#method.entry
pub enum Entry<'a, K, V, S> {
    /// An occupied entry.
    Occupied(OccupiedEntry<'a, K, V, S>),
    /// A vacant entry.
    Vacant(VacantEntry<'a, K, V, S>),
}
impl<K: Debug, V: Debug, S> Debug for Entry<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Both variants print under the single tuple name "Entry".
        match *self {
            Entry::Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
            Entry::Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
        }
    }
}
/// A view into an occupied entry in a `HashMap`.
/// It is part of the [`Entry`] enum.
///
/// [`Entry`]: enum.Entry.html
pub struct OccupiedEntry<'a, K, V, S> {
    // The caller's key, kept so `replace_entry`-style APIs can swap it in;
    // `None` when the lookup key was not retained — TODO confirm against the
    // methods defined outside this chunk.
    key: Option<K>,
    elem: Bucket<(K, V)>,
    table: &'a mut HashMap<K, V, S>,
}
// SAFETY: the entry holds exclusive access to the map for 'a, so it is
// Send/Sync exactly when its components are.
unsafe impl<K, V, S> Send for OccupiedEntry<'_, K, V, S>
where
    K: Send,
    V: Send,
    S: Send,
{
}
unsafe impl<K, V, S> Sync for OccupiedEntry<'_, K, V, S>
where
    K: Sync,
    V: Sync,
    S: Sync,
{
}
impl<K: Debug, V: Debug, S> Debug for OccupiedEntry<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("OccupiedEntry")
            .field("key", self.key())
            .field("value", self.get())
            .finish()
    }
}
/// A view into a vacant entry in a `HashMap`.
/// It is part of the [`Entry`] enum.
///
/// [`Entry`]: enum.Entry.html
pub struct VacantEntry<'a, K, V, S> {
    // The precomputed hash of `key`, so insertion needs no re-hash.
    hash: u64,
    // The key the caller looked up; consumed on insert.
    key: K,
    table: &'a mut HashMap<K, V, S>,
}
impl<K: Debug, V, S> Debug for VacantEntry<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("VacantEntry").field(self.key()).finish()
    }
}
impl<'a, K, V, S> IntoIterator for &'a HashMap<K, V, S> {
    type Item = (&'a K, &'a V);
    type IntoIter = Iter<'a, K, V>;
    // Enables `for (k, v) in &map`; delegates to `iter`.
    #[cfg_attr(feature = "inline-more", inline)]
    fn into_iter(self) -> Iter<'a, K, V> {
        self.iter()
    }
}
impl<'a, K, V, S> IntoIterator for &'a mut HashMap<K, V, S> {
    type Item = (&'a K, &'a mut V);
    type IntoIter = IterMut<'a, K, V>;
    // Enables `for (k, v) in &mut map`; delegates to `iter_mut`.
    #[cfg_attr(feature = "inline-more", inline)]
    fn into_iter(self) -> IterMut<'a, K, V> {
        self.iter_mut()
    }
}
impl<K, V, S> IntoIterator for HashMap<K, V, S> {
    type Item = (K, V);
    type IntoIter = IntoIter<K, V>;
    /// Creates a consuming iterator, that is, one that moves each key-value
    /// pair out of the map in arbitrary order. The map cannot be used after
    /// calling this.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// // Not possible with .iter()
    /// let vec: Vec<(&str, i32)> = map.into_iter().collect();
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    fn into_iter(self) -> IntoIter<K, V> {
        // Ownership of the raw table moves into the iterator.
        IntoIter {
            inner: self.table.into_iter(),
        }
    }
}
impl<'a, K, V> Iterator for Iter<'a, K, V> {
    type Item = (&'a K, &'a V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(&'a K, &'a V)> {
        let bucket = self.inner.next()?;
        // The bucket just yielded by the raw iterator is valid for the map
        // borrow 'a, so handing out shared references into it is sound.
        unsafe {
            let (key, value) = bucket.as_ref();
            Some((key, value))
        }
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for Iter<'_, K, V> {
    /// The raw iterator knows the exact remaining count.
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
impl<K, V> FusedIterator for Iter<'_, K, V> {}
impl<'a, K, V> Iterator for IterMut<'a, K, V> {
    type Item = (&'a K, &'a mut V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
        let bucket = self.inner.next()?;
        // Keys stay shared (mutating them could corrupt the table); only the
        // value half is handed out mutably.
        let &mut (ref key, ref mut value) = unsafe { bucket.as_mut() };
        Some((key, value))
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for IterMut<'_, K, V> {
    /// The raw iterator knows the exact remaining count.
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
impl<K, V> FusedIterator for IterMut<'_, K, V> {}
impl<K, V> fmt::Debug for IterMut<'_, K, V>
where
    K: fmt::Debug,
    V: fmt::Debug,
{
    /// Debug-prints the remaining entries through a shared-reference view.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}
impl<K, V> Iterator for IntoIter<K, V> {
    type Item = (K, V);
    // Pure passthrough: the raw owning iterator already yields (K, V) pairs.
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(K, V)> {
        self.inner.next()
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for IntoIter<K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
impl<K, V> FusedIterator for IntoIter<K, V> {}
impl<K: Debug, V: Debug> fmt::Debug for IntoIter<K, V> {
    // Debug-prints the not-yet-consumed entries via the borrowing view.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}
impl<'a, K, V> Iterator for Keys<'a, K, V> {
    type Item = &'a K;
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<&'a K> {
        // Project the key half out of the underlying entry iterator.
        let (key, _) = self.inner.next()?;
        Some(key)
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for Keys<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
impl<K, V> FusedIterator for Keys<'_, K, V> {}
impl<'a, K, V> Iterator for Values<'a, K, V> {
    type Item = &'a V;
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<&'a V> {
        // Project the value half out of the underlying entry iterator.
        let (_, value) = self.inner.next()?;
        Some(value)
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for Values<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
impl<K, V> FusedIterator for Values<'_, K, V> {}
impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
    type Item = &'a mut V;
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<&'a mut V> {
        // Project the mutable value half out of the entry iterator.
        let (_, value) = self.inner.next()?;
        Some(value)
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
impl<K, V> FusedIterator for ValuesMut<'_, K, V> {}
impl<K, V> fmt::Debug for ValuesMut<'_, K, V>
where
    K: fmt::Debug,
    V: fmt::Debug,
{
    /// Debug-prints the remaining entries through a shared-reference view.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.inner.iter()).finish()
    }
}
impl<'a, K, V> Iterator for Drain<'a, K, V> {
    type Item = (K, V);
    // Pure passthrough: the raw drain yields owned (K, V) pairs.
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(K, V)> {
        self.inner.next()
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for Drain<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
impl<K, V> FusedIterator for Drain<'_, K, V> {}
impl<K, V> fmt::Debug for Drain<'_, K, V>
where
    K: fmt::Debug,
    V: fmt::Debug,
{
    // Debug-prints the not-yet-drained entries via the borrowing view.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}
impl<'a, K, V, S> Entry<'a, K, V, S> {
    /// Sets the value of the entry, and returns an OccupiedEntry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// let entry = map.entry("horseyland").insert(37);
    ///
    /// assert_eq!(entry.key(), &"horseyland");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, value: V) -> OccupiedEntry<'a, K, V, S>
    where
        K: Hash,
        S: BuildHasher,
    {
        // Occupied: overwrite the value in place. Vacant: insert the pair
        // and return the new occupied view.
        match self {
            Entry::Occupied(mut entry) => {
                entry.insert(value);
                entry
            }
            Entry::Vacant(entry) => entry.insert_entry(value),
        }
    }
    /// Ensures a value is in the entry by inserting the default if empty, and returns
    /// a mutable reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// map.entry("poneyland").or_insert(3);
    /// assert_eq!(map["poneyland"], 3);
    ///
    /// *map.entry("poneyland").or_insert(10) *= 2;
    /// assert_eq!(map["poneyland"], 6);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert(self, default: V) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        // `default` is already evaluated by the caller; prefer
        // `or_insert_with` when construction is expensive.
        match self {
            Entry::Occupied(entry) => entry.into_mut(),
            Entry::Vacant(entry) => entry.insert(default),
        }
    }
    /// Ensures a value is in the entry by inserting the result of the default function if empty,
    /// and returns a mutable reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, String> = HashMap::new();
    /// let s = "hoho".to_string();
    ///
    /// map.entry("poneyland").or_insert_with(|| s);
    ///
    /// assert_eq!(map["poneyland"], "hoho".to_string());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        // `default` runs only on the vacant path.
        match self {
            Entry::Occupied(entry) => entry.into_mut(),
            Entry::Vacant(entry) => entry.insert(default()),
        }
    }
    /// Returns a reference to this entry's key.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K {
        match *self {
            Entry::Occupied(ref entry) => entry.key(),
            Entry::Vacant(ref entry) => entry.key(),
        }
    }
    /// Provides in-place mutable access to an occupied entry before any
    /// potential inserts into the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// map.entry("poneyland")
    ///    .and_modify(|e| { *e += 1 })
    ///    .or_insert(42);
    /// assert_eq!(map["poneyland"], 42);
    ///
    /// map.entry("poneyland")
    ///    .and_modify(|e| { *e += 1 })
    ///    .or_insert(42);
    /// assert_eq!(map["poneyland"], 43);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn and_modify<F>(self, f: F) -> Self
    where
        F: FnOnce(&mut V),
    {
        // Runs `f` only when occupied; the entry is rebuilt and returned so
        // the call can be chained with `or_insert*`.
        match self {
            Entry::Occupied(mut entry) => {
                f(entry.get_mut());
                Entry::Occupied(entry)
            }
            Entry::Vacant(entry) => Entry::Vacant(entry),
        }
    }
}
impl<'a, K, V: Default, S> Entry<'a, K, V, S> {
    /// Ensures a value is in the entry by inserting the default value if empty,
    /// and returns a mutable reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// # fn main() {
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, Option<u32>> = HashMap::new();
    /// map.entry("poneyland").or_default();
    ///
    /// assert_eq!(map["poneyland"], None);
    /// # }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_default(self) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        // `V::default()` is constructed only on the vacant path.
        match self {
            Entry::Occupied(entry) => entry.into_mut(),
            Entry::Vacant(entry) => entry.insert(V::default()),
        }
    }
}
impl<'a, K, V, S> OccupiedEntry<'a, K, V, S> {
    /// Gets a reference to the key in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K {
        // `elem` is a bucket pointer to the occupied (key, value) pair;
        // field 0 is the key.
        unsafe { &self.elem.as_ref().0 }
    }
    /// Take the ownership of the key and value from the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     // We delete the entry from the map.
    ///     o.remove_entry();
    /// }
    ///
    /// assert_eq!(map.contains_key("poneyland"), false);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove_entry(self) -> (K, V) {
        unsafe {
            // Unlink the bucket from the table without running destructors,
            // then move the pair out by value; ownership transfers to the
            // caller so nothing is dropped twice.
            self.table.table.erase_no_drop(&self.elem);
            self.elem.read()
        }
    }
    /// Gets a reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     assert_eq!(o.get(), &12);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get(&self) -> &V {
        // Field 1 of the (key, value) pair is the value.
        unsafe { &self.elem.as_ref().1 }
    }
    /// Gets a mutable reference to the value in the entry.
    ///
    /// If you need a reference to the `OccupiedEntry` which may outlive the
    /// destruction of the `Entry` value, see [`into_mut`].
    ///
    /// [`into_mut`]: #method.into_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// assert_eq!(map["poneyland"], 12);
    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
    ///     *o.get_mut() += 10;
    ///     assert_eq!(*o.get(), 22);
    ///
    ///     // We can use the same Entry multiple times.
    ///     *o.get_mut() += 2;
    /// }
    ///
    /// assert_eq!(map["poneyland"], 24);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_mut(&mut self) -> &mut V {
        // Borrow is tied to `&mut self`, so the entry can be used again.
        unsafe { &mut self.elem.as_mut().1 }
    }
    /// Converts the OccupiedEntry into a mutable reference to the value in the entry
    /// with a lifetime bound to the map itself.
    ///
    /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
    ///
    /// [`get_mut`]: #method.get_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// assert_eq!(map["poneyland"], 12);
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     *o.into_mut() += 10;
    /// }
    ///
    /// assert_eq!(map["poneyland"], 22);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_mut(self) -> &'a mut V {
        // Consumes the entry: the returned borrow lives as long as the map
        // borrow ('a) rather than the entry.
        unsafe { &mut self.elem.as_mut().1 }
    }
    /// Sets the value of the entry, and returns the entry's old value.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
    ///     assert_eq!(o.insert(15), 12);
    /// }
    ///
    /// assert_eq!(map["poneyland"], 15);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(&mut self, mut value: V) -> V {
        // Swap the new value into the bucket; `value` then holds the old one.
        let old_value = self.get_mut();
        mem::swap(&mut value, old_value);
        value
    }
    /// Takes the value out of the entry, and returns it.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     assert_eq!(o.remove(), 12);
    /// }
    ///
    /// assert_eq!(map.contains_key("poneyland"), false);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove(self) -> V {
        // Removes the whole pair and drops the key, keeping only the value.
        self.remove_entry().1
    }
    /// Replaces the entry, returning the old key and value. The new key in the hash map will be
    /// the key used to create this entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::hash_map::{Entry, HashMap};
    /// use std::rc::Rc;
    ///
    /// let mut map: HashMap<Rc<String>, u32> = HashMap::new();
    /// map.insert(Rc::new("Stringthing".to_string()), 15);
    ///
    /// let my_key = Rc::new("Stringthing".to_string());
    ///
    /// if let Entry::Occupied(entry) = map.entry(my_key) {
    ///     // Also replace the key with a handle to our other key.
    ///     let (old_key, old_value): (Rc<String>, u32) = entry.replace_entry(16);
    /// }
    ///
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn replace_entry(self, value: V) -> (K, V) {
        // `self.key` holds the key the entry was created with.
        // NOTE(review): entries produced by `VacantEntry::insert_entry`
        // store `None` here, so this `unwrap` relies on `replace_entry`
        // only being reachable for entries created with an owned key —
        // confirm that invariant at the call sites.
        let entry = unsafe { self.elem.as_mut() };
        let old_key = mem::replace(&mut entry.0, self.key.unwrap());
        let old_value = mem::replace(&mut entry.1, value);
        (old_key, old_value)
    }
    /// Replaces the key in the hash map with the key used to create this entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::hash_map::{Entry, HashMap};
    /// use std::rc::Rc;
    ///
    /// let mut map: HashMap<Rc<String>, u32> = HashMap::new();
    /// let mut known_strings: Vec<Rc<String>> = Vec::new();
    ///
    /// // Initialise known strings, run program, etc.
    ///
    /// reclaim_memory(&mut map, &known_strings);
    ///
    /// fn reclaim_memory(map: &mut HashMap<Rc<String>, u32>, known_strings: &[Rc<String>] ) {
    ///     for s in known_strings {
    ///         if let Entry::Occupied(entry) = map.entry(s.clone()) {
    ///             // Replaces the entry's key with our version of it in `known_strings`.
    ///             entry.replace_key();
    ///         }
    ///     }
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn replace_key(self) -> K {
        // Same `self.key` invariant as `replace_entry` above: panics if the
        // entry was created without an owned key.
        let entry = unsafe { self.elem.as_mut() };
        mem::replace(&mut entry.0, self.key.unwrap())
    }
}
impl<'a, K, V, S> VacantEntry<'a, K, V, S> {
    /// Gets a reference to the key that would be used when inserting a value
    /// through the `VacantEntry`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K {
        // A vacant entry owns its key until (and unless) it is inserted.
        &self.key
    }
    /// Take ownership of the key.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// if let Entry::Vacant(v) = map.entry("poneyland") {
    ///     v.into_key();
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_key(self) -> K {
        self.key
    }
    /// Sets the value of the entry with the VacantEntry's key,
    /// and returns a mutable reference to it.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// if let Entry::Vacant(o) = map.entry("poneyland") {
    ///     o.insert(37);
    /// }
    /// assert_eq!(map["poneyland"], 37);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, value: V) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        let hash_builder = &self.table.hash_builder;
        // Insert the pair using the hash computed when this entry was
        // created. The closure lets the raw table recompute hashes of
        // stored pairs — presumably during a resize; confirm against
        // `RawTable::insert`.
        let bucket = self.table.table.insert(self.hash, (self.key, value), |x| {
            make_hash(hash_builder, &x.0)
        });
        // The bucket was just inserted, so it is valid; field 1 is the value.
        unsafe { &mut bucket.as_mut().1 }
    }
    // Like `insert`, but returns an `OccupiedEntry` pointing at the newly
    // inserted pair instead of a value reference.
    #[cfg_attr(feature = "inline-more", inline)]
    fn insert_entry(self, value: V) -> OccupiedEntry<'a, K, V, S>
    where
        K: Hash,
        S: BuildHasher,
    {
        let hash_builder = &self.table.hash_builder;
        let elem = self.table.table.insert(self.hash, (self.key, value), |x| {
            make_hash(hash_builder, &x.0)
        });
        OccupiedEntry {
            // The key has been moved into the table, so the resulting entry
            // carries no owned key (`replace_entry`/`replace_key` would
            // panic on it).
            key: None,
            elem,
            table: self.table,
        }
    }
}
impl<K, V, S> FromIterator<(K, V)> for HashMap<K, V, S>
where
    K: Eq + Hash,
    S: BuildHasher + Default,
{
    /// Builds a map from `(key, value)` pairs, pre-sizing from the
    /// iterator's lower size-hint bound. Later duplicates of a key
    /// overwrite earlier ones, as with repeated `insert`.
    #[cfg_attr(feature = "inline-more", inline)]
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        let pairs = iter.into_iter();
        let (lower_bound, _) = pairs.size_hint();
        let mut map = Self::with_capacity_and_hasher(lower_bound, S::default());
        for (key, value) in pairs {
            map.insert(key, value);
        }
        map
    }
}
impl<K, V, S> Extend<(K, V)> for HashMap<K, V, S>
where
    K: Eq + Hash,
    S: BuildHasher,
{
    #[cfg_attr(feature = "inline-more", inline)]
    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
        let pairs = iter.into_iter();
        // Keys may already be present or appear multiple times in the
        // iterator, so the size hint only bounds the number of *new*
        // entries. An empty map reserves the full lower hint; a non-empty
        // one reserves half of it (rounded up), which bounds the worst
        // case at two resizes.
        let (lower_bound, _) = pairs.size_hint();
        let additional = if self.is_empty() {
            lower_bound
        } else {
            (lower_bound + 1) / 2
        };
        self.reserve(additional);
        for (key, value) in pairs {
            self.insert(key, value);
        }
    }
}
impl<'a, K, V, S> Extend<(&'a K, &'a V)> for HashMap<K, V, S>
where
    K: Eq + Hash + Copy,
    V: Copy,
    S: BuildHasher,
{
    /// Copies each borrowed pair (both types are `Copy`) and forwards to
    /// the owned-pair `extend` implementation.
    #[cfg_attr(feature = "inline-more", inline)]
    fn extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T) {
        let owned_pairs = iter.into_iter().map(|(k, v)| (*k, *v));
        self.extend(owned_pairs);
    }
}
// Compile-time variance check: each helper returns its argument unchanged,
// coercing a `'static` lifetime parameter to an arbitrary shorter `'new`
// lifetime. If any of these types were not covariant in the checked
// parameter, the corresponding `v`/`d` expression would fail to compile.
// The function is never called; #[allow(dead_code)] keeps it out of warnings.
#[allow(dead_code)]
fn assert_covariance() {
    // HashMap is covariant in K and in V.
    fn map_key<'new>(v: HashMap<&'static str, u8>) -> HashMap<&'new str, u8> {
        v
    }
    fn map_val<'new>(v: HashMap<u8, &'static str>) -> HashMap<u8, &'new str> {
        v
    }
    // Iter is covariant in K and in V.
    fn iter_key<'a, 'new>(v: Iter<'a, &'static str, u8>) -> Iter<'a, &'new str, u8> {
        v
    }
    fn iter_val<'a, 'new>(v: Iter<'a, u8, &'static str>) -> Iter<'a, u8, &'new str> {
        v
    }
    // IntoIter is covariant in K and in V.
    fn into_iter_key<'new>(v: IntoIter<&'static str, u8>) -> IntoIter<&'new str, u8> {
        v
    }
    fn into_iter_val<'new>(v: IntoIter<u8, &'static str>) -> IntoIter<u8, &'new str> {
        v
    }
    // Keys and Values are covariant in both parameters as well.
    fn keys_key<'a, 'new>(v: Keys<'a, &'static str, u8>) -> Keys<'a, &'new str, u8> {
        v
    }
    fn keys_val<'a, 'new>(v: Keys<'a, u8, &'static str>) -> Keys<'a, u8, &'new str> {
        v
    }
    fn values_key<'a, 'new>(v: Values<'a, &'static str, u8>) -> Values<'a, &'new str, u8> {
        v
    }
    fn values_val<'a, 'new>(v: Values<'a, u8, &'static str>) -> Values<'a, u8, &'new str> {
        v
    }
    // Drain is covariant in its lifetime and both type parameters.
    fn drain<'new>(
        d: Drain<'static, &'static str, &'static str>,
    ) -> Drain<'new, &'new str, &'new str> {
        d
    }
}
// Unit tests for the `HashMap` API: construction, insertion/removal, drop
// correctness, iterator length bookkeeping, entry and raw-entry APIs, and
// the capacity/resize policy.
#[cfg(test)]
mod test_map {
    use super::DefaultHashBuilder;
    use super::Entry::{Occupied, Vacant};
    use super::{HashMap, RawEntryMut};
    #[cfg(not(miri))]
    use crate::CollectionAllocErr::*;
    use rand::{rngs::SmallRng, Rng, SeedableRng};
    use std::cell::RefCell;
    use std::usize;
    use std::vec::Vec;
    #[test]
    fn test_zero_capacities() {
        type HM = HashMap<i32, i32>;
        // Every construction path with zero requested capacity must not allocate.
        let m = HM::new();
        assert_eq!(m.capacity(), 0);
        let m = HM::default();
        assert_eq!(m.capacity(), 0);
        let m = HM::with_hasher(DefaultHashBuilder::default());
        assert_eq!(m.capacity(), 0);
        let m = HM::with_capacity(0);
        assert_eq!(m.capacity(), 0);
        let m = HM::with_capacity_and_hasher(0, DefaultHashBuilder::default());
        assert_eq!(m.capacity(), 0);
        // Shrinking an emptied map must release all capacity.
        let mut m = HM::new();
        m.insert(1, 1);
        m.insert(2, 2);
        m.remove(&1);
        m.remove(&2);
        m.shrink_to_fit();
        assert_eq!(m.capacity(), 0);
        // reserve(0) must not allocate either.
        let mut m = HM::new();
        m.reserve(0);
        assert_eq!(m.capacity(), 0);
    }
    #[test]
    fn test_create_capacity_zero() {
        let mut m = HashMap::with_capacity(0);
        assert!(m.insert(1, 1).is_none());
        assert!(m.contains_key(&1));
        assert!(!m.contains_key(&0));
    }
    #[test]
    fn test_insert() {
        let mut m = HashMap::new();
        assert_eq!(m.len(), 0);
        assert!(m.insert(1, 2).is_none());
        assert_eq!(m.len(), 1);
        assert!(m.insert(2, 4).is_none());
        assert_eq!(m.len(), 2);
        assert_eq!(*m.get(&1).unwrap(), 2);
        assert_eq!(*m.get(&2).unwrap(), 4);
    }
    #[test]
    fn test_clone() {
        let mut m = HashMap::new();
        assert_eq!(m.len(), 0);
        assert!(m.insert(1, 2).is_none());
        assert_eq!(m.len(), 1);
        assert!(m.insert(2, 4).is_none());
        assert_eq!(m.len(), 2);
        let m2 = m.clone();
        assert_eq!(*m2.get(&1).unwrap(), 2);
        assert_eq!(*m2.get(&2).unwrap(), 4);
        assert_eq!(m2.len(), 2);
    }
    // DROP_VECTOR[k] counts the live `Droppable` instances with id `k`:
    // `Droppable::new` increments the slot and `Drop` decrements it, so a
    // slot value of 0 means every instance with that id has been dropped.
    thread_local! { static DROP_VECTOR: RefCell<Vec<i32>> = RefCell::new(Vec::new()) }
    #[derive(Hash, PartialEq, Eq)]
    struct Droppable {
        k: usize,
    }
    impl Droppable {
        fn new(k: usize) -> Droppable {
            DROP_VECTOR.with(|slot| {
                slot.borrow_mut()[k] += 1;
            });
            Droppable { k }
        }
    }
    impl Drop for Droppable {
        fn drop(&mut self) {
            DROP_VECTOR.with(|slot| {
                slot.borrow_mut()[self.k] -= 1;
            });
        }
    }
    impl Clone for Droppable {
        fn clone(&self) -> Self {
            // Goes through `new` so the clone is counted too.
            Droppable::new(self.k)
        }
    }
    #[test]
    fn test_drops() {
        // Keys use ids 0..100, values use ids 100..200.
        DROP_VECTOR.with(|slot| {
            *slot.borrow_mut() = vec![0; 200];
        });
        {
            let mut m = HashMap::new();
            DROP_VECTOR.with(|v| {
                for i in 0..200 {
                    assert_eq!(v.borrow()[i], 0);
                }
            });
            for i in 0..100 {
                let d1 = Droppable::new(i);
                let d2 = Droppable::new(i + 100);
                m.insert(d1, d2);
            }
            DROP_VECTOR.with(|v| {
                for i in 0..200 {
                    assert_eq!(v.borrow()[i], 1);
                }
            });
            // Removing half the entries: the lookup key `k` created here is
            // still alive inside the closure, so counts stay at 1 until the
            // end of each iteration.
            for i in 0..50 {
                let k = Droppable::new(i);
                let v = m.remove(&k);
                assert!(v.is_some());
                DROP_VECTOR.with(|v| {
                    assert_eq!(v.borrow()[i], 1);
                    assert_eq!(v.borrow()[i + 100], 1);
                });
            }
            DROP_VECTOR.with(|v| {
                for i in 0..50 {
                    assert_eq!(v.borrow()[i], 0);
                    assert_eq!(v.borrow()[i + 100], 0);
                }
                for i in 50..100 {
                    assert_eq!(v.borrow()[i], 1);
                    assert_eq!(v.borrow()[i + 100], 1);
                }
            });
        }
        // Dropping the map must drop every remaining key and value exactly once.
        DROP_VECTOR.with(|v| {
            for i in 0..200 {
                assert_eq!(v.borrow()[i], 0);
            }
        });
    }
    #[test]
    fn test_into_iter_drops() {
        DROP_VECTOR.with(|v| {
            *v.borrow_mut() = vec![0; 200];
        });
        let hm = {
            let mut hm = HashMap::new();
            DROP_VECTOR.with(|v| {
                for i in 0..200 {
                    assert_eq!(v.borrow()[i], 0);
                }
            });
            for i in 0..100 {
                let d1 = Droppable::new(i);
                let d2 = Droppable::new(i + 100);
                hm.insert(d1, d2);
            }
            DROP_VECTOR.with(|v| {
                for i in 0..200 {
                    assert_eq!(v.borrow()[i], 1);
                }
            });
            hm
        };
        // By the way, ensure that cloning doesn't screw up the dropping.
        drop(hm.clone());
        {
            // Consume only 50 of the 100 entries; creating the iterator
            // alone must not drop anything.
            let mut half = hm.into_iter().take(50);
            DROP_VECTOR.with(|v| {
                for i in 0..200 {
                    assert_eq!(v.borrow()[i], 1);
                }
            });
            for _ in half.by_ref() {}
            // Iteration order is unspecified, so only the *number* of
            // dropped keys/values (50 each) can be asserted.
            DROP_VECTOR.with(|v| {
                let nk = (0..100).filter(|&i| v.borrow()[i] == 1).count();
                let nv = (0..100).filter(|&i| v.borrow()[i + 100] == 1).count();
                assert_eq!(nk, 50);
                assert_eq!(nv, 50);
            });
        };
        // Dropping the partially-consumed iterator drops the remaining half.
        DROP_VECTOR.with(|v| {
            for i in 0..200 {
                assert_eq!(v.borrow()[i], 0);
            }
        });
    }
    #[test]
    fn test_empty_remove() {
        let mut m: HashMap<i32, bool> = HashMap::new();
        assert_eq!(m.remove(&0), None);
    }
    #[test]
    fn test_empty_entry() {
        let mut m: HashMap<i32, bool> = HashMap::new();
        match m.entry(0) {
            Occupied(_) => panic!(),
            Vacant(_) => {}
        }
        assert!(*m.entry(0).or_insert(true));
        assert_eq!(m.len(), 1);
    }
    #[test]
    fn test_empty_iter() {
        let mut m: HashMap<i32, bool> = HashMap::new();
        assert_eq!(m.drain().next(), None);
        assert_eq!(m.keys().next(), None);
        assert_eq!(m.values().next(), None);
        assert_eq!(m.values_mut().next(), None);
        assert_eq!(m.iter().next(), None);
        assert_eq!(m.iter_mut().next(), None);
        assert_eq!(m.len(), 0);
        assert!(m.is_empty());
        assert_eq!(m.into_iter().next(), None);
    }
    #[test]
    #[cfg(not(miri))] // FIXME: https://github.com/rust-lang/miri/issues/654
    fn test_lots_of_insertions() {
        let mut m = HashMap::new();
        // Try this a few times to make sure we never screw up the hashmap's
        // internal state.
        for _ in 0..10 {
            assert!(m.is_empty());
            for i in 1..1001 {
                assert!(m.insert(i, i).is_none());
                // After each insert, every key inserted so far is present
                // and every key not yet inserted is absent.
                for j in 1..=i {
                    let r = m.get(&j);
                    assert_eq!(r, Some(&j));
                }
                for j in i + 1..1001 {
                    let r = m.get(&j);
                    assert_eq!(r, None);
                }
            }
            for i in 1001..2001 {
                assert!(!m.contains_key(&i));
            }
            // remove forwards
            for i in 1..1001 {
                assert!(m.remove(&i).is_some());
                for j in 1..=i {
                    assert!(!m.contains_key(&j));
                }
                for j in i + 1..1001 {
                    assert!(m.contains_key(&j));
                }
            }
            for i in 1..1001 {
                assert!(!m.contains_key(&i));
            }
            for i in 1..1001 {
                assert!(m.insert(i, i).is_none());
            }
            // remove backwards
            for i in (1..1001).rev() {
                assert!(m.remove(&i).is_some());
                for j in i..1001 {
                    assert!(!m.contains_key(&j));
                }
                for j in 1..i {
                    assert!(m.contains_key(&j));
                }
            }
        }
    }
    #[test]
    fn test_find_mut() {
        let mut m = HashMap::new();
        assert!(m.insert(1, 12).is_none());
        assert!(m.insert(2, 8).is_none());
        assert!(m.insert(5, 14).is_none());
        let new = 100;
        match m.get_mut(&5) {
            None => panic!(),
            Some(x) => *x = new,
        }
        assert_eq!(m.get(&5), Some(&new));
    }
    #[test]
    fn test_insert_overwrite() {
        let mut m = HashMap::new();
        assert!(m.insert(1, 2).is_none());
        assert_eq!(*m.get(&1).unwrap(), 2);
        // Re-inserting an existing key returns the old value (Some).
        assert!(!m.insert(1, 3).is_none());
        assert_eq!(*m.get(&1).unwrap(), 3);
    }
    #[test]
    fn test_insert_conflicts() {
        let mut m = HashMap::with_capacity(4);
        assert!(m.insert(1, 2).is_none());
        assert!(m.insert(5, 3).is_none());
        assert!(m.insert(9, 4).is_none());
        assert_eq!(*m.get(&9).unwrap(), 4);
        assert_eq!(*m.get(&5).unwrap(), 3);
        assert_eq!(*m.get(&1).unwrap(), 2);
    }
    #[test]
    fn test_conflict_remove() {
        let mut m = HashMap::with_capacity(4);
        assert!(m.insert(1, 2).is_none());
        assert_eq!(*m.get(&1).unwrap(), 2);
        assert!(m.insert(5, 3).is_none());
        assert_eq!(*m.get(&1).unwrap(), 2);
        assert_eq!(*m.get(&5).unwrap(), 3);
        assert!(m.insert(9, 4).is_none());
        assert_eq!(*m.get(&1).unwrap(), 2);
        assert_eq!(*m.get(&5).unwrap(), 3);
        assert_eq!(*m.get(&9).unwrap(), 4);
        assert!(m.remove(&1).is_some());
        assert_eq!(*m.get(&9).unwrap(), 4);
        assert_eq!(*m.get(&5).unwrap(), 3);
    }
    #[test]
    fn test_is_empty() {
        let mut m = HashMap::with_capacity(4);
        assert!(m.insert(1, 2).is_none());
        assert!(!m.is_empty());
        assert!(m.remove(&1).is_some());
        assert!(m.is_empty());
    }
    #[test]
    fn test_remove() {
        let mut m = HashMap::new();
        m.insert(1, 2);
        assert_eq!(m.remove(&1), Some(2));
        assert_eq!(m.remove(&1), None);
    }
    #[test]
    fn test_remove_entry() {
        let mut m = HashMap::new();
        m.insert(1, 2);
        assert_eq!(m.remove_entry(&1), Some((1, 2)));
        assert_eq!(m.remove(&1), None);
    }
    #[test]
    fn test_iterate() {
        let mut m = HashMap::with_capacity(4);
        for i in 0..32 {
            assert!(m.insert(i, i * 2).is_none());
        }
        assert_eq!(m.len(), 32);
        // Each key sets one bit; all 32 bits set means every entry was
        // yielded exactly the expected pair.
        let mut observed: u32 = 0;
        for (k, v) in &m {
            assert_eq!(*v, *k * 2);
            observed |= 1 << *k;
        }
        assert_eq!(observed, 0xFFFF_FFFF);
    }
    #[test]
    fn test_keys() {
        let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
        let map: HashMap<_, _> = vec.into_iter().collect();
        let keys: Vec<_> = map.keys().cloned().collect();
        assert_eq!(keys.len(), 3);
        assert!(keys.contains(&1));
        assert!(keys.contains(&2));
        assert!(keys.contains(&3));
    }
    #[test]
    fn test_values() {
        let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
        let map: HashMap<_, _> = vec.into_iter().collect();
        let values: Vec<_> = map.values().cloned().collect();
        assert_eq!(values.len(), 3);
        assert!(values.contains(&'a'));
        assert!(values.contains(&'b'));
        assert!(values.contains(&'c'));
    }
    #[test]
    fn test_values_mut() {
        let vec = vec![(1, 1), (2, 2), (3, 3)];
        let mut map: HashMap<_, _> = vec.into_iter().collect();
        for value in map.values_mut() {
            *value = (*value) * 2
        }
        let values: Vec<_> = map.values().cloned().collect();
        assert_eq!(values.len(), 3);
        assert!(values.contains(&2));
        assert!(values.contains(&4));
        assert!(values.contains(&6));
    }
    #[test]
    fn test_find() {
        let mut m = HashMap::new();
        assert!(m.get(&1).is_none());
        m.insert(1, 2);
        match m.get(&1) {
            None => panic!(),
            Some(v) => assert_eq!(*v, 2),
        }
    }
    #[test]
    fn test_eq() {
        let mut m1 = HashMap::new();
        m1.insert(1, 2);
        m1.insert(2, 3);
        m1.insert(3, 4);
        let mut m2 = HashMap::new();
        m2.insert(1, 2);
        m2.insert(2, 3);
        assert!(m1 != m2);
        m2.insert(3, 4);
        assert_eq!(m1, m2);
    }
    #[test]
    fn test_show() {
        let mut map = HashMap::new();
        let empty: HashMap<i32, i32> = HashMap::new();
        map.insert(1, 2);
        map.insert(3, 4);
        // Iteration order is unspecified, so accept either ordering.
        let map_str = format!("{:?}", map);
        assert!(map_str == "{1: 2, 3: 4}" || map_str == "{3: 4, 1: 2}");
        assert_eq!(format!("{:?}", empty), "{}");
    }
    #[test]
    fn test_expand() {
        let mut m = HashMap::new();
        assert_eq!(m.len(), 0);
        assert!(m.is_empty());
        // Insert until the raw capacity changes, i.e. until a resize occurs.
        let mut i = 0;
        let old_raw_cap = m.raw_capacity();
        while old_raw_cap == m.raw_capacity() {
            m.insert(i, i);
            i += 1;
        }
        assert_eq!(m.len(), i);
        assert!(!m.is_empty());
    }
    #[test]
    fn test_behavior_resize_policy() {
        // Pins the exact grow/shrink behavior of the raw table (doubling on
        // reserve, load-factor thresholds, shrink_to_fit restoring earlier
        // capacities).
        let mut m = HashMap::new();
        assert_eq!(m.len(), 0);
        assert_eq!(m.raw_capacity(), 1);
        assert!(m.is_empty());
        m.insert(0, 0);
        m.remove(&0);
        assert!(m.is_empty());
        let initial_raw_cap = m.raw_capacity();
        m.reserve(initial_raw_cap);
        let raw_cap = m.raw_capacity();
        assert_eq!(raw_cap, initial_raw_cap * 2);
        let mut i = 0;
        for _ in 0..raw_cap * 3 / 4 {
            m.insert(i, i);
            i += 1;
        }
        // three quarters full
        assert_eq!(m.len(), i);
        assert_eq!(m.raw_capacity(), raw_cap);
        for _ in 0..raw_cap / 4 {
            m.insert(i, i);
            i += 1;
        }
        // half full
        let new_raw_cap = m.raw_capacity();
        assert_eq!(new_raw_cap, raw_cap * 2);
        for _ in 0..raw_cap / 2 - 1 {
            i -= 1;
            m.remove(&i);
            assert_eq!(m.raw_capacity(), new_raw_cap);
        }
        // A little more than one quarter full.
        m.shrink_to_fit();
        assert_eq!(m.raw_capacity(), raw_cap);
        // again, a little more than half full
        for _ in 0..raw_cap / 2 {
            i -= 1;
            m.remove(&i);
        }
        m.shrink_to_fit();
        assert_eq!(m.len(), i);
        assert!(!m.is_empty());
        assert_eq!(m.raw_capacity(), initial_raw_cap);
    }
    #[test]
    fn test_reserve_shrink_to_fit() {
        let mut m = HashMap::new();
        m.insert(0, 0);
        m.remove(&0);
        assert!(m.capacity() >= m.len());
        for i in 0..128 {
            m.insert(i, i);
        }
        m.reserve(256);
        // After reserving, inserts up to the reserved count must not resize.
        let usable_cap = m.capacity();
        for i in 128..(128 + 256) {
            m.insert(i, i);
            assert_eq!(m.capacity(), usable_cap);
        }
        for i in 100..(128 + 256) {
            assert_eq!(m.remove(&i), Some(i));
        }
        m.shrink_to_fit();
        assert_eq!(m.len(), 100);
        assert!(!m.is_empty());
        assert!(m.capacity() >= m.len());
        for i in 0..100 {
            assert_eq!(m.remove(&i), Some(i));
        }
        m.shrink_to_fit();
        m.insert(0, 0);
        assert_eq!(m.len(), 1);
        assert!(m.capacity() >= m.len());
        assert_eq!(m.remove(&0), Some(0));
    }
    #[test]
    fn test_from_iter() {
        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
        let map: HashMap<_, _> = xs.iter().cloned().collect();
        for &(k, v) in &xs {
            assert_eq!(map.get(&k), Some(&v));
        }
    }
    #[test]
    fn test_size_hint() {
        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
        let map: HashMap<_, _> = xs.iter().cloned().collect();
        let mut iter = map.iter();
        // Consuming 3 of 6 items leaves an exact (3, Some(3)) hint.
        for _ in iter.by_ref().take(3) {}
        assert_eq!(iter.size_hint(), (3, Some(3)));
    }
    #[test]
    fn test_iter_len() {
        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
        let map: HashMap<_, _> = xs.iter().cloned().collect();
        let mut iter = map.iter();
        for _ in iter.by_ref().take(3) {}
        assert_eq!(iter.len(), 3);
    }
    #[test]
    fn test_mut_size_hint() {
        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
        let mut map: HashMap<_, _> = xs.iter().cloned().collect();
        let mut iter = map.iter_mut();
        for _ in iter.by_ref().take(3) {}
        assert_eq!(iter.size_hint(), (3, Some(3)));
    }
    #[test]
    fn test_iter_mut_len() {
        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
        let mut map: HashMap<_, _> = xs.iter().cloned().collect();
        let mut iter = map.iter_mut();
        for _ in iter.by_ref().take(3) {}
        assert_eq!(iter.len(), 3);
    }
    #[test]
    fn test_index() {
        let mut map = HashMap::new();
        map.insert(1, 2);
        map.insert(2, 1);
        map.insert(3, 4);
        assert_eq!(map[&2], 1);
    }
    #[test]
    #[should_panic]
    fn test_index_nonexistent() {
        let mut map = HashMap::new();
        map.insert(1, 2);
        map.insert(2, 1);
        map.insert(3, 4);
        // Indexing a missing key must panic.
        map[&4];
    }
    #[test]
    fn test_entry() {
        let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
        let mut map: HashMap<_, _> = xs.iter().cloned().collect();
        // Existing key (insert)
        match map.entry(1) {
            Vacant(_) => unreachable!(),
            Occupied(mut view) => {
                assert_eq!(view.get(), &10);
                assert_eq!(view.insert(100), 10);
            }
        }
        assert_eq!(map.get(&1).unwrap(), &100);
        assert_eq!(map.len(), 6);
        // Existing key (update)
        match map.entry(2) {
            Vacant(_) => unreachable!(),
            Occupied(mut view) => {
                let v = view.get_mut();
                let new_v = (*v) * 10;
                *v = new_v;
            }
        }
        assert_eq!(map.get(&2).unwrap(), &200);
        assert_eq!(map.len(), 6);
        // Existing key (take)
        match map.entry(3) {
            Vacant(_) => unreachable!(),
            Occupied(view) => {
                assert_eq!(view.remove(), 30);
            }
        }
        assert_eq!(map.get(&3), None);
        assert_eq!(map.len(), 5);
        // Inexistent key (insert)
        match map.entry(10) {
            Occupied(_) => unreachable!(),
            Vacant(view) => {
                assert_eq!(*view.insert(1000), 1000);
            }
        }
        assert_eq!(map.get(&10).unwrap(), &1000);
        assert_eq!(map.len(), 6);
    }
    #[test]
    fn test_entry_take_doesnt_corrupt() {
        #![allow(deprecated)] //rand
        // Test for #19292
        fn check(m: &HashMap<i32, ()>) {
            for k in m.keys() {
                assert!(m.contains_key(k), "{} is in keys() but not in the map?", k);
            }
        }
        let mut m = HashMap::new();
        // FIXME: https://github.com/rust-lang/miri/issues/653
        let mut rng = {
            let seed = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
            SmallRng::from_seed(seed)
        };
        // Populate the map with some items.
        for _ in 0..50 {
            let x = rng.gen_range(-10, 10);
            m.insert(x, ());
        }
        // Randomly remove entries via the entry API and check consistency
        // after every removal.
        for _ in 0..1000 {
            let x = rng.gen_range(-10, 10);
            match m.entry(x) {
                Vacant(_) => {}
                Occupied(e) => {
                    e.remove();
                }
            }
            check(&m);
        }
    }
    #[test]
    fn test_extend_ref() {
        let mut a = HashMap::new();
        a.insert(1, "one");
        let mut b = HashMap::new();
        b.insert(2, "two");
        b.insert(3, "three");
        // Extend from borrowed pairs (the Extend<(&K, &V)> impl).
        a.extend(&b);
        assert_eq!(a.len(), 3);
        assert_eq!(a[&1], "one");
        assert_eq!(a[&2], "two");
        assert_eq!(a[&3], "three");
    }
    #[test]
    fn test_capacity_not_less_than_len() {
        let mut a = HashMap::new();
        let mut item = 0;
        for _ in 0..116 {
            a.insert(item, 0);
            item += 1;
        }
        assert!(a.capacity() > a.len());
        // Fill up to exactly the reported capacity without triggering growth.
        let free = a.capacity() - a.len();
        for _ in 0..free {
            a.insert(item, 0);
            item += 1;
        }
        assert_eq!(a.len(), a.capacity());
        // Insert at capacity should cause allocation.
        a.insert(item, 0);
        assert!(a.capacity() > a.len());
    }
    #[test]
    fn test_occupied_entry_key() {
        let mut a = HashMap::new();
        let key = "hello there";
        let value = "value goes here";
        assert!(a.is_empty());
        a.insert(key.clone(), value.clone());
        assert_eq!(a.len(), 1);
        assert_eq!(a[key], value);
        match a.entry(key.clone()) {
            Vacant(_) => panic!(),
            Occupied(e) => assert_eq!(key, *e.key()),
        }
        assert_eq!(a.len(), 1);
        assert_eq!(a[key], value);
    }
    #[test]
    fn test_vacant_entry_key() {
        let mut a = HashMap::new();
        let key = "hello there";
        let value = "value goes here";
        assert!(a.is_empty());
        match a.entry(key.clone()) {
            Occupied(_) => panic!(),
            Vacant(e) => {
                assert_eq!(key, *e.key());
                e.insert(value.clone());
            }
        }
        assert_eq!(a.len(), 1);
        assert_eq!(a[key], value);
    }
    #[test]
    fn test_retain() {
        let mut map: HashMap<i32, i32> = (0..100).map(|x| (x, x * 10)).collect();
        map.retain(|&k, _| k % 2 == 0);
        assert_eq!(map.len(), 50);
        assert_eq!(map[&2], 20);
        assert_eq!(map[&4], 40);
        assert_eq!(map[&6], 60);
    }
    #[test]
    #[cfg(not(miri))] // FIXME: https://github.com/rust-lang/miri/issues/655
    fn test_try_reserve() {
        let mut empty_bytes: HashMap<u8, u8> = HashMap::new();
        const MAX_USIZE: usize = usize::MAX;
        // Reserving usize::MAX entries must fail with a capacity overflow,
        // not an allocator error.
        if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_USIZE) {
        } else {
            panic!("usize::MAX should trigger an overflow!");
        }
        if let Err(AllocErr { .. }) = empty_bytes.try_reserve(MAX_USIZE / 8) {
        } else {
            // This may succeed if there is enough free memory. Attempt to
            // allocate a second hashmap to ensure the allocation will fail.
            let mut empty_bytes2: HashMap<u8, u8> = HashMap::new();
            if let Err(AllocErr { .. }) = empty_bytes2.try_reserve(MAX_USIZE / 8) {
            } else {
                panic!("usize::MAX / 8 should trigger an OOM!");
            }
        }
    }
    #[test]
    fn test_raw_entry() {
        use super::RawEntryMut::{Occupied, Vacant};
        let xs = [(1i32, 10i32), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
        let mut map: HashMap<_, _> = xs.iter().cloned().collect();
        // Computes the same hash the map itself would use for key `k`, so
        // the *_hashed_nocheck / from_hash lookups agree with from_key.
        let compute_hash = |map: &HashMap<i32, i32>, k: i32| -> u64 {
            use core::hash::{BuildHasher, Hash, Hasher};
            let mut hasher = map.hasher().build_hasher();
            k.hash(&mut hasher);
            hasher.finish()
        };
        // Existing key (insert)
        match map.raw_entry_mut().from_key(&1) {
            Vacant(_) => unreachable!(),
            Occupied(mut view) => {
                assert_eq!(view.get(), &10);
                assert_eq!(view.insert(100), 10);
            }
        }
        let hash1 = compute_hash(&map, 1);
        assert_eq!(map.raw_entry().from_key(&1).unwrap(), (&1, &100));
        assert_eq!(
            map.raw_entry().from_hash(hash1, |k| *k == 1).unwrap(),
            (&1, &100)
        );
        assert_eq!(
            map.raw_entry().from_key_hashed_nocheck(hash1, &1).unwrap(),
            (&1, &100)
        );
        assert_eq!(map.len(), 6);
        // Existing key (update)
        match map.raw_entry_mut().from_key(&2) {
            Vacant(_) => unreachable!(),
            Occupied(mut view) => {
                let v = view.get_mut();
                let new_v = (*v) * 10;
                *v = new_v;
            }
        }
        let hash2 = compute_hash(&map, 2);
        assert_eq!(map.raw_entry().from_key(&2).unwrap(), (&2, &200));
        assert_eq!(
            map.raw_entry().from_hash(hash2, |k| *k == 2).unwrap(),
            (&2, &200)
        );
        assert_eq!(
            map.raw_entry().from_key_hashed_nocheck(hash2, &2).unwrap(),
            (&2, &200)
        );
        assert_eq!(map.len(), 6);
        // Existing key (take)
        let hash3 = compute_hash(&map, 3);
        match map.raw_entry_mut().from_key_hashed_nocheck(hash3, &3) {
            Vacant(_) => unreachable!(),
            Occupied(view) => {
                assert_eq!(view.remove_entry(), (3, 30));
            }
        }
        assert_eq!(map.raw_entry().from_key(&3), None);
        assert_eq!(map.raw_entry().from_hash(hash3, |k| *k == 3), None);
        assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash3, &3), None);
        assert_eq!(map.len(), 5);
        // Nonexistent key (insert)
        match map.raw_entry_mut().from_key(&10) {
            Occupied(_) => unreachable!(),
            Vacant(view) => {
                assert_eq!(view.insert(10, 1000), (&mut 10, &mut 1000));
            }
        }
        assert_eq!(map.raw_entry().from_key(&10).unwrap(), (&10, &1000));
        assert_eq!(map.len(), 6);
        // Ensure all lookup methods produce equivalent results.
        for k in 0..12 {
            let hash = compute_hash(&map, k);
            let v = map.get(&k).cloned();
            let kv = v.as_ref().map(|v| (&k, v));
            assert_eq!(map.raw_entry().from_key(&k), kv);
            assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv);
            assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv);
            match map.raw_entry_mut().from_key(&k) {
                Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
                Vacant(_) => assert_eq!(v, None),
            }
            match map.raw_entry_mut().from_key_hashed_nocheck(hash, &k) {
                Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
                Vacant(_) => assert_eq!(v, None),
            }
            match map.raw_entry_mut().from_hash(hash, |q| *q == k) {
                Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
                Vacant(_) => assert_eq!(v, None),
            }
        }
    }
    #[test]
    fn test_key_without_hash_impl() {
        // Uses `S = ()` so all hashing goes through explicit hashes and
        // `insert_with_hasher`; the key type deliberately lacks `Hash`.
        #[derive(Debug)]
        struct IntWrapper(u64);
        let mut m: HashMap<IntWrapper, (), ()> = HashMap::default();
        {
            assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_none());
        }
        {
            let vacant_entry = match m.raw_entry_mut().from_hash(0, |k| k.0 == 0) {
                RawEntryMut::Occupied(..) => panic!("Found entry for key 0"),
                RawEntryMut::Vacant(e) => e,
            };
            vacant_entry.insert_with_hasher(0, IntWrapper(0), (), |k| k.0);
        }
        {
            assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_some());
            assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_none());
            assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
        }
        {
            let vacant_entry = match m.raw_entry_mut().from_hash(1, |k| k.0 == 1) {
                RawEntryMut::Occupied(..) => panic!("Found entry for key 1"),
                RawEntryMut::Vacant(e) => e,
            };
            vacant_entry.insert_with_hasher(1, IntWrapper(1), (), |k| k.0);
        }
        {
            assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_some());
            assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_some());
            assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
        }
        {
            let occupied_entry = match m.raw_entry_mut().from_hash(0, |k| k.0 == 0) {
                RawEntryMut::Occupied(e) => e,
                RawEntryMut::Vacant(..) => panic!("Couldn't find entry for key 0"),
            };
            occupied_entry.remove();
        }
        assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_none());
        assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_some());
        assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
    }
}
// NOTE(review): stray text "update Miri comments" — looks like a pasted
// commit-message artifact, not code; preserved as a comment so the file parses.
use crate::raw::{Bucket, RawDrain, RawIntoIter, RawIter, RawTable};
use crate::CollectionAllocErr;
use core::borrow::Borrow;
use core::fmt::{self, Debug};
use core::hash::{BuildHasher, Hash, Hasher};
use core::iter::{FromIterator, FusedIterator};
use core::marker::PhantomData;
use core::mem;
use core::ops::Index;
/// Default hasher for `HashMap`.
#[cfg(feature = "ahash")]
pub type DefaultHashBuilder = ahash::ABuildHasher;
/// Dummy default hasher for `HashMap`.
///
/// An uninhabited enum: no value of this type can ever be constructed, so
/// with the `ahash` feature disabled any API requiring a default hasher is
/// effectively unusable and a hasher must be supplied explicitly.
/// NOTE(review): inferred from the cfg gate and the empty enum — confirm
/// against the `HashMap` constructors.
#[cfg(not(feature = "ahash"))]
pub enum DefaultHashBuilder {}
/// A hash map implemented with quadratic probing and SIMD lookup.
///
/// The default hashing algorithm is currently [`AHash`], though this is
/// subject to change at any point in the future. This hash function is very
/// fast for all types of keys, but this algorithm will typically *not* protect
/// against attacks such as HashDoS.
///
/// The hashing algorithm can be replaced on a per-`HashMap` basis using the
/// [`default`], [`with_hasher`], and [`with_capacity_and_hasher`] methods. Many
/// alternative algorithms are available on crates.io, such as the [`fnv`] crate.
///
/// It is required that the keys implement the [`Eq`] and [`Hash`] traits, although
/// this can frequently be achieved by using `#[derive(PartialEq, Eq, Hash)]`.
/// If you implement these yourself, it is important that the following
/// property holds:
///
/// ```text
/// k1 == k2 -> hash(k1) == hash(k2)
/// ```
///
/// In other words, if two keys are equal, their hashes must be equal.
///
/// It is a logic error for a key to be modified in such a way that the key's
/// hash, as determined by the [`Hash`] trait, or its equality, as determined by
/// the [`Eq`] trait, changes while it is in the map. This is normally only
/// possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
///
/// It is also a logic error for the [`Hash`] implementation of a key to panic.
/// This is generally only possible if the trait is implemented manually. If a
/// panic does occur then the contents of the `HashMap` may become corrupted and
/// some items may be dropped from the table.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// // Type inference lets us omit an explicit type signature (which
/// // would be `HashMap<String, String>` in this example).
/// let mut book_reviews = HashMap::new();
///
/// // Review some books.
/// book_reviews.insert(
/// "Adventures of Huckleberry Finn".to_string(),
/// "My favorite book.".to_string(),
/// );
/// book_reviews.insert(
/// "Grimms' Fairy Tales".to_string(),
/// "Masterpiece.".to_string(),
/// );
/// book_reviews.insert(
/// "Pride and Prejudice".to_string(),
/// "Very enjoyable.".to_string(),
/// );
/// book_reviews.insert(
/// "The Adventures of Sherlock Holmes".to_string(),
/// "Eye lyked it alot.".to_string(),
/// );
///
/// // Check for a specific one.
/// // When collections store owned values (String), they can still be
/// // queried using references (&str).
/// if !book_reviews.contains_key("Les Misérables") {
/// println!("We've got {} reviews, but Les Misérables ain't one.",
/// book_reviews.len());
/// }
///
/// // oops, this review has a lot of spelling mistakes, let's delete it.
/// book_reviews.remove("The Adventures of Sherlock Holmes");
///
/// // Look up the values associated with some keys.
/// let to_find = ["Pride and Prejudice", "Alice's Adventure in Wonderland"];
/// for &book in &to_find {
/// match book_reviews.get(book) {
/// Some(review) => println!("{}: {}", book, review),
/// None => println!("{} is unreviewed.", book)
/// }
/// }
///
/// // Look up the value for a key (will panic if the key is not found).
/// println!("Review for Jane: {}", book_reviews["Pride and Prejudice"]);
///
/// // Iterate over everything.
/// for (book, review) in &book_reviews {
/// println!("{}: \"{}\"", book, review);
/// }
/// ```
///
/// `HashMap` also implements an [`Entry API`](#method.entry), which allows
/// for more complex methods of getting, setting, updating and removing keys and
/// their values:
///
/// ```
/// use hashbrown::HashMap;
///
/// // type inference lets us omit an explicit type signature (which
/// // would be `HashMap<&str, u8>` in this example).
/// let mut player_stats = HashMap::new();
///
/// fn random_stat_buff() -> u8 {
/// // could actually return some random value here - let's just return
/// // some fixed value for now
/// 42
/// }
///
/// // insert a key only if it doesn't already exist
/// player_stats.entry("health").or_insert(100);
///
/// // insert a key using a function that provides a new value only if it
/// // doesn't already exist
/// player_stats.entry("defence").or_insert_with(random_stat_buff);
///
/// // update a key, guarding against the key possibly not being set
/// let stat = player_stats.entry("attack").or_insert(100);
/// *stat += random_stat_buff();
/// ```
///
/// The easiest way to use `HashMap` with a custom key type is to derive [`Eq`] and [`Hash`].
/// We must also derive [`PartialEq`].
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
/// [`PartialEq`]: https://doc.rust-lang.org/std/cmp/trait.PartialEq.html
/// [`RefCell`]: https://doc.rust-lang.org/std/cell/struct.RefCell.html
/// [`Cell`]: https://doc.rust-lang.org/std/cell/struct.Cell.html
/// [`default`]: #method.default
/// [`with_hasher`]: #method.with_hasher
/// [`with_capacity_and_hasher`]: #method.with_capacity_and_hasher
/// [`fnv`]: https://crates.io/crates/fnv
/// [`AHash`]: https://crates.io/crates/ahash
///
/// ```
/// use hashbrown::HashMap;
///
/// #[derive(Hash, Eq, PartialEq, Debug)]
/// struct Viking {
/// name: String,
/// country: String,
/// }
///
/// impl Viking {
/// /// Creates a new Viking.
/// fn new(name: &str, country: &str) -> Viking {
/// Viking { name: name.to_string(), country: country.to_string() }
/// }
/// }
///
/// // Use a HashMap to store the vikings' health points.
/// let mut vikings = HashMap::new();
///
/// vikings.insert(Viking::new("Einar", "Norway"), 25);
/// vikings.insert(Viking::new("Olaf", "Denmark"), 24);
/// vikings.insert(Viking::new("Harald", "Iceland"), 12);
///
/// // Use derived implementation to print the status of the vikings.
/// for (viking, health) in &vikings {
/// println!("{:?} has {} hp", viking, health);
/// }
/// ```
///
/// A `HashMap` with fixed list of elements can be initialized from an array:
///
/// ```
/// use hashbrown::HashMap;
///
/// fn main() {
/// let timber_resources: HashMap<&str, i32> =
/// [("Norway", 100),
/// ("Denmark", 50),
/// ("Iceland", 10)]
/// .iter().cloned().collect();
/// // use the values stored in map
/// }
/// ```
#[derive(Clone)]
pub struct HashMap<K, V, S = DefaultHashBuilder> {
    // Builder used to produce a fresh hasher every time a key is hashed.
    pub(crate) hash_builder: S,
    // Underlying SIMD-probed table storing the `(key, value)` pairs.
    pub(crate) table: RawTable<(K, V)>,
}
/// Computes the 64-bit hash of `val` with a hasher freshly built from
/// `hash_builder`. This is the single hashing entry point used by the map.
#[cfg_attr(feature = "inline-more", inline)]
pub(crate) fn make_hash<K: Hash + ?Sized>(hash_builder: &impl BuildHasher, val: &K) -> u64 {
    // Build a fresh hasher, feed the value through it, then extract the digest.
    let mut hasher = hash_builder.build_hasher();
    Hash::hash(val, &mut hasher);
    Hasher::finish(&hasher)
}
// These convenience constructors are only available with the "ahash" feature,
// which supplies a real (instantiable) default hash builder.
#[cfg(feature = "ahash")]
impl<K, V> HashMap<K, V, DefaultHashBuilder> {
    /// Creates an empty `HashMap`.
    ///
    /// The hash map is initially created with a capacity of 0, so it will not allocate until it
    /// is first inserted into.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let mut map: HashMap<&str, i32> = HashMap::new();
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn new() -> Self {
        Self::default()
    }
    /// Creates an empty `HashMap` with the specified capacity.
    ///
    /// The hash map will be able to hold at least `capacity` elements without
    /// reallocating. If `capacity` is 0, the hash map will not allocate.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let mut map: HashMap<&str, i32> = HashMap::with_capacity(10);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn with_capacity(capacity: usize) -> Self {
        Self::with_capacity_and_hasher(capacity, DefaultHashBuilder::default())
    }
}
// Methods available for any hasher type `S`: none of these need `K: Hash + Eq`
// or `S: BuildHasher`, so they work even with placeholder hashers.
impl<K, V, S> HashMap<K, V, S> {
    /// Creates an empty `HashMap` which will use the given hash builder to hash
    /// keys.
    ///
    /// The created map has the default initial capacity.
    ///
    /// Warning: `hash_builder` is normally randomly generated, and
    /// is designed to allow HashMaps to be resistant to attacks that
    /// cause many collisions and very poor performance. Setting it
    /// manually using this function can expose a DoS attack vector.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::DefaultHashBuilder;
    ///
    /// let s = DefaultHashBuilder::default();
    /// let mut map = HashMap::with_hasher(s);
    /// map.insert(1, 2);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn with_hasher(hash_builder: S) -> Self {
        Self {
            hash_builder,
            table: RawTable::new(),
        }
    }
    /// Creates an empty `HashMap` with the specified capacity, using `hash_builder`
    /// to hash the keys.
    ///
    /// The hash map will be able to hold at least `capacity` elements without
    /// reallocating. If `capacity` is 0, the hash map will not allocate.
    ///
    /// Warning: `hash_builder` is normally randomly generated, and
    /// is designed to allow HashMaps to be resistant to attacks that
    /// cause many collisions and very poor performance. Setting it
    /// manually using this function can expose a DoS attack vector.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::DefaultHashBuilder;
    ///
    /// let s = DefaultHashBuilder::default();
    /// let mut map = HashMap::with_capacity_and_hasher(10, s);
    /// map.insert(1, 2);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Self {
        Self {
            hash_builder,
            table: RawTable::with_capacity(capacity),
        }
    }
    /// Returns a reference to the map's [`BuildHasher`].
    ///
    /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::DefaultHashBuilder;
    ///
    /// let hasher = DefaultHashBuilder::default();
    /// let map: HashMap<i32, i32> = HashMap::with_hasher(hasher);
    /// let hasher: &DefaultHashBuilder = map.hasher();
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn hasher(&self) -> &S {
        &self.hash_builder
    }
    /// Returns the number of elements the map can hold without reallocating.
    ///
    /// This number is a lower bound; the `HashMap<K, V>` might be able to hold
    /// more, but is guaranteed to be able to hold at least this many.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let map: HashMap<i32, i32> = HashMap::with_capacity(100);
    /// assert!(map.capacity() >= 100);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn capacity(&self) -> usize {
        self.table.capacity()
    }
    /// An iterator visiting all keys in arbitrary order.
    /// The iterator element type is `&'a K`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// for key in map.keys() {
    ///     println!("{}", key);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn keys(&self) -> Keys<'_, K, V> {
        // `Keys` is a thin projection over the full-entry iterator.
        Keys { inner: self.iter() }
    }
    /// An iterator visiting all values in arbitrary order.
    /// The iterator element type is `&'a V`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// for val in map.values() {
    ///     println!("{}", val);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn values(&self) -> Values<'_, K, V> {
        Values { inner: self.iter() }
    }
    /// An iterator visiting all values mutably in arbitrary order.
    /// The iterator element type is `&'a mut V`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    ///
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// for val in map.values_mut() {
    ///     *val = *val + 10;
    /// }
    ///
    /// for val in map.values() {
    ///     println!("{}", val);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn values_mut(&mut self) -> ValuesMut<'_, K, V> {
        ValuesMut {
            inner: self.iter_mut(),
        }
    }
    /// An iterator visiting all key-value pairs in arbitrary order.
    /// The iterator element type is `(&'a K, &'a V)`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// for (key, val) in map.iter() {
    ///     println!("key: {} val: {}", key, val);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn iter(&self) -> Iter<'_, K, V> {
        // Here we tie the lifetime of self to the iter.
        // The `PhantomData` marker inside `Iter` borrows the map for `'_`,
        // which is what keeps the raw table iterator from outliving the table.
        unsafe {
            Iter {
                inner: self.table.iter(),
                marker: PhantomData,
            }
        }
    }
    /// An iterator visiting all key-value pairs in arbitrary order,
    /// with mutable references to the values.
    /// The iterator element type is `(&'a K, &'a mut V)`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// // Update all values
    /// for (_, val) in map.iter_mut() {
    ///     *val *= 2;
    /// }
    ///
    /// for (key, val) in &map {
    ///     println!("key: {} val: {}", key, val);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
        // Here we tie the lifetime of self to the iter.
        // Same pattern as `iter`, but taking `&mut self` grants exclusive
        // access, which is what makes handing out `&mut V` sound.
        unsafe {
            IterMut {
                inner: self.table.iter(),
                marker: PhantomData,
            }
        }
    }
    // Test-only helper: number of buckets actually allocated, as opposed to
    // the usable `capacity()`.
    #[cfg(test)]
    #[cfg_attr(feature = "inline-more", inline)]
    fn raw_capacity(&self) -> usize {
        self.table.buckets()
    }
    /// Returns the number of elements in the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut a = HashMap::new();
    /// assert_eq!(a.len(), 0);
    /// a.insert(1, "a");
    /// assert_eq!(a.len(), 1);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn len(&self) -> usize {
        self.table.len()
    }
    /// Returns `true` if the map contains no elements.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut a = HashMap::new();
    /// assert!(a.is_empty());
    /// a.insert(1, "a");
    /// assert!(!a.is_empty());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Clears the map, returning all key-value pairs as an iterator. Keeps the
    /// allocated memory for reuse.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut a = HashMap::new();
    /// a.insert(1, "a");
    /// a.insert(2, "b");
    ///
    /// for (k, v) in a.drain().take(1) {
    ///     assert!(k == 1 || k == 2);
    ///     assert!(v == "a" || v == "b");
    /// }
    ///
    /// assert!(a.is_empty());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn drain(&mut self) -> Drain<'_, K, V> {
        // Here we tie the lifetime of self to the iter.
        // `RawDrain` carries the `'_` borrow itself, so dropping the `Drain`
        // before the map is enforced by the borrow checker.
        unsafe {
            Drain {
                inner: self.table.drain(),
            }
        }
    }
    /// Clears the map, removing all key-value pairs. Keeps the allocated memory
    /// for reuse.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut a = HashMap::new();
    /// a.insert(1, "a");
    /// a.clear();
    /// assert!(a.is_empty());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn clear(&mut self) {
        self.table.clear();
    }
}
// Methods that actually hash or compare keys; these need `K: Eq + Hash` and a
// usable `S: BuildHasher`.
impl<K, V, S> HashMap<K, V, S>
where
    K: Eq + Hash,
    S: BuildHasher,
{
    /// Reserves capacity for at least `additional` more elements to be inserted
    /// in the `HashMap`. The collection may reserve more space to avoid
    /// frequent reallocations.
    ///
    /// # Panics
    ///
    /// Panics if the new allocation size overflows [`usize`].
    ///
    /// [`usize`]: https://doc.rust-lang.org/std/primitive.usize.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let mut map: HashMap<&str, i32> = HashMap::new();
    /// map.reserve(10);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn reserve(&mut self, additional: usize) {
        // Growing the table rehashes every element; the closure recomputes an
        // entry's hash from its stored key.
        let hash_builder = &self.hash_builder;
        self.table
            .reserve(additional, |x| make_hash(hash_builder, &x.0));
    }
    /// Tries to reserve capacity for at least `additional` more elements to be inserted
    /// in the given `HashMap<K,V>`. The collection may reserve more space to avoid
    /// frequent reallocations.
    ///
    /// # Errors
    ///
    /// If the capacity overflows, or the allocator reports a failure, then an error
    /// is returned.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let mut map: HashMap<&str, isize> = HashMap::new();
    /// map.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
        let hash_builder = &self.hash_builder;
        self.table
            .try_reserve(additional, |x| make_hash(hash_builder, &x.0))
    }
    /// Shrinks the capacity of the map as much as possible. It will drop
    /// down as much as possible while maintaining the internal rules
    /// and possibly leaving some space in accordance with the resize policy.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
    /// map.insert(1, 2);
    /// map.insert(3, 4);
    /// assert!(map.capacity() >= 100);
    /// map.shrink_to_fit();
    /// assert!(map.capacity() >= 2);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn shrink_to_fit(&mut self) {
        // `shrink_to(0, ..)` means "no lower bound": shrink as far as the
        // current element count allows.
        let hash_builder = &self.hash_builder;
        self.table.shrink_to(0, |x| make_hash(hash_builder, &x.0));
    }
    /// Shrinks the capacity of the map with a lower limit. It will drop
    /// down no lower than the supplied limit while maintaining the internal rules
    /// and possibly leaving some space in accordance with the resize policy.
    ///
    /// This function does nothing if the current capacity is smaller than the
    /// supplied minimum capacity.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
    /// map.insert(1, 2);
    /// map.insert(3, 4);
    /// assert!(map.capacity() >= 100);
    /// map.shrink_to(10);
    /// assert!(map.capacity() >= 10);
    /// map.shrink_to(0);
    /// assert!(map.capacity() >= 2);
    /// map.shrink_to(10);
    /// assert!(map.capacity() >= 2);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn shrink_to(&mut self, min_capacity: usize) {
        let hash_builder = &self.hash_builder;
        self.table
            .shrink_to(min_capacity, |x| make_hash(hash_builder, &x.0));
    }
    /// Gets the given key's corresponding entry in the map for in-place manipulation.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut letters = HashMap::new();
    ///
    /// for ch in "a short treatise on fungi".chars() {
    ///     let counter = letters.entry(ch).or_insert(0);
    ///     *counter += 1;
    /// }
    ///
    /// assert_eq!(letters[&'s'], 2);
    /// assert_eq!(letters[&'t'], 3);
    /// assert_eq!(letters[&'u'], 1);
    /// assert_eq!(letters.get(&'y'), None);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S> {
        // One hash + lookup decides which entry variant to build; the vacant
        // variant caches the hash so insertion doesn't recompute it.
        let hash = make_hash(&self.hash_builder, &key);
        if let Some(elem) = self.table.find(hash, |q| q.0.eq(&key)) {
            Entry::Occupied(OccupiedEntry {
                key: Some(key),
                elem,
                table: self,
            })
        } else {
            Entry::Vacant(VacantEntry {
                hash,
                key,
                table: self,
            })
        }
    }
    /// Returns a reference to the value corresponding to the key.
    ///
    /// The key may be any borrowed form of the map's key type, but
    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.get(&1), Some(&"a"));
    /// assert_eq!(map.get(&2), None);
    /// ```
    #[inline]
    pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        self.get_key_value(k).map(|(_, v)| v)
    }
    /// Returns the key-value pair corresponding to the supplied key.
    ///
    /// The supplied key may be any borrowed form of the map's key type, but
    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
    /// assert_eq!(map.get_key_value(&2), None);
    /// ```
    #[inline]
    pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        let hash = make_hash(&self.hash_builder, k);
        self.table
            .find(hash, |x| k.eq(x.0.borrow()))
            .map(|item| unsafe {
                // The bucket stays valid for the duration of the `&self`
                // borrow, so handing out references into it is sound.
                let &(ref key, ref value) = item.as_ref();
                (key, value)
            })
    }
    /// Returns `true` if the map contains a value for the specified key.
    ///
    /// The key may be any borrowed form of the map's key type, but
    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.contains_key(&1), true);
    /// assert_eq!(map.contains_key(&2), false);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn contains_key<Q: ?Sized>(&self, k: &Q) -> bool
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        self.get(k).is_some()
    }
    /// Returns a mutable reference to the value corresponding to the key.
    ///
    /// The key may be any borrowed form of the map's key type, but
    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert(1, "a");
    /// if let Some(x) = map.get_mut(&1) {
    ///     *x = "b";
    /// }
    /// assert_eq!(map[&1], "b");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        let hash = make_hash(&self.hash_builder, k);
        self.table
            .find(hash, |x| k.eq(x.0.borrow()))
            // Only the value half of the pair is exposed mutably; the key must
            // never be mutated through this method.
            .map(|item| unsafe { &mut item.as_mut().1 })
    }
    /// Inserts a key-value pair into the map.
    ///
    /// If the map did not have this key present, [`None`] is returned.
    ///
    /// If the map did have this key present, the value is updated, and the old
    /// value is returned. The key is not updated, though; this matters for
    /// types that can be `==` without being identical. See the [module-level
    /// documentation] for more.
    ///
    /// [`None`]: https://doc.rust-lang.org/std/option/enum.Option.html#variant.None
    /// [module-level documentation]: index.html#insert-and-complex-keys
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// assert_eq!(map.insert(37, "a"), None);
    /// assert_eq!(map.is_empty(), false);
    ///
    /// map.insert(37, "b");
    /// assert_eq!(map.insert(37, "c"), Some("b"));
    /// assert_eq!(map[&37], "c");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(&mut self, k: K, v: V) -> Option<V> {
        unsafe {
            let hash = make_hash(&self.hash_builder, &k);
            if let Some(item) = self.table.find(hash, |x| k.eq(&x.0)) {
                // Existing entry: swap in the new value, keep the stored key.
                Some(mem::replace(&mut item.as_mut().1, v))
            } else {
                let hash_builder = &self.hash_builder;
                self.table
                    .insert(hash, (k, v), |x| make_hash(hash_builder, &x.0));
                None
            }
        }
    }
    /// Removes a key from the map, returning the value at the key if the key
    /// was previously in the map.
    ///
    /// The key may be any borrowed form of the map's key type, but
    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.remove(&1), Some("a"));
    /// assert_eq!(map.remove(&1), None);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        self.remove_entry(k).map(|(_, v)| v)
    }
    /// Removes a key from the map, returning the stored key and value if the
    /// key was previously in the map.
    ///
    /// The key may be any borrowed form of the map's key type, but
    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// # fn main() {
    /// let mut map = HashMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.remove_entry(&1), Some((1, "a")));
    /// assert_eq!(map.remove(&1), None);
    /// # }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove_entry<Q: ?Sized>(&mut self, k: &Q) -> Option<(K, V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        unsafe {
            let hash = make_hash(&self.hash_builder, &k);
            if let Some(item) = self.table.find(hash, |x| k.eq(x.0.borrow())) {
                // Unlink the bucket without dropping its contents, then move
                // the (key, value) pair out of it by value.
                self.table.erase_no_drop(&item);
                Some(item.read())
            } else {
                None
            }
        }
    }
    /// Retains only the elements specified by the predicate.
    ///
    /// In other words, remove all pairs `(k, v)` such that `f(&k,&mut v)` returns `false`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<i32, i32> = (0..8).map(|x|(x, x*10)).collect();
    /// map.retain(|&k, _| k % 2 == 0);
    /// assert_eq!(map.len(), 4);
    /// ```
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&K, &mut V) -> bool,
    {
        // Here we only use `iter` as a temporary, preventing use-after-free
        unsafe {
            for item in self.table.iter() {
                let &mut (ref key, ref mut value) = item.as_mut();
                if !f(key, value) {
                    // Erase the element from the table first since drop might panic.
                    self.table.erase_no_drop(&item);
                    item.drop();
                }
            }
        }
    }
}
// Raw-entry builders: unbounded on `K`/`S` because the caller supplies hashes
// and comparison logic explicitly.
impl<K, V, S> HashMap<K, V, S> {
    /// Creates a raw entry builder for the HashMap.
    ///
    /// Raw entries provide the lowest level of control for searching and
    /// manipulating a map. They must be manually initialized with a hash and
    /// then manually searched. After this, insertions into a vacant entry
    /// still require an owned key to be provided.
    ///
    /// Raw entries are useful for such exotic situations as:
    ///
    /// * Hash memoization
    /// * Deferring the creation of an owned key until it is known to be required
    /// * Using a search key that doesn't work with the Borrow trait
    /// * Using custom comparison logic without newtype wrappers
    ///
    /// Because raw entries provide much more low-level control, it's much easier
    /// to put the HashMap into an inconsistent state which, while memory-safe,
    /// will cause the map to produce seemingly random results. Higher-level and
    /// more foolproof APIs like `entry` should be preferred when possible.
    ///
    /// In particular, the hash used to initialized the raw entry must still be
    /// consistent with the hash of the key that is ultimately stored in the entry.
    /// This is because implementations of HashMap may need to recompute hashes
    /// when resizing, at which point only the keys are available.
    ///
    /// Raw entries give mutable access to the keys. This must not be used
    /// to modify how the key would compare or hash, as the map will not re-evaluate
    /// where the key should go, meaning the keys may become "lost" if their
    /// location does not reflect their state. For instance, if you change a key
    /// so that the map now contains keys which compare equal, search may start
    /// acting erratically, with two keys randomly masking each other. Implementations
    /// are free to assume this doesn't happen (within the limits of memory-safety).
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<'_, K, V, S> {
        RawEntryBuilderMut { map: self }
    }
    /// Creates a raw immutable entry builder for the HashMap.
    ///
    /// Raw entries provide the lowest level of control for searching and
    /// manipulating a map. They must be manually initialized with a hash and
    /// then manually searched.
    ///
    /// This is useful for
    /// * Hash memoization
    /// * Using a search key that doesn't work with the Borrow trait
    /// * Using custom comparison logic without newtype wrappers
    ///
    /// Unless you are in such a situation, higher-level and more foolproof APIs like
    /// `get` should be preferred.
    ///
    /// Immutable raw entries have very limited use; you might instead want `raw_entry_mut`.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S> {
        RawEntryBuilder { map: self }
    }
}
impl<K, V, S> PartialEq for HashMap<K, V, S>
where
    K: Eq + Hash,
    V: PartialEq,
    S: BuildHasher,
{
    /// Two maps are equal when they have the same length and every key of
    /// `self` maps to an equal value in `other`. The `&&` short-circuits,
    /// so mismatched lengths skip the per-entry scan entirely.
    fn eq(&self, other: &Self) -> bool {
        self.len() == other.len()
            && self
                .iter()
                .all(|(key, value)| other.get(key).map_or(false, |v| *value == *v))
    }
}
// `Eq` is a marker refinement of `PartialEq` with no methods, hence the empty
// body; it is valid because map equality is reflexive whenever `V: Eq`.
impl<K, V, S> Eq for HashMap<K, V, S>
where
    K: Eq + Hash,
    V: Eq,
    S: BuildHasher,
{
}
// Note `S` is intentionally unconstrained: formatting only iterates entries
// and never hashes, so any hasher type works.
impl<K, V, S> Debug for HashMap<K, V, S>
where
    K: Debug,
    V: Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_map().entries(self.iter()).finish()
    }
}
impl<K, V, S> Default for HashMap<K, V, S>
where
    S: Default,
{
    /// Creates an empty `HashMap<K, V, S>`, with the `Default` value for the hasher.
    #[cfg_attr(feature = "inline-more", inline)]
    fn default() -> Self {
        // Delegate to `with_hasher` with a default-constructed hash builder.
        Self::with_hasher(S::default())
    }
}
impl<K, Q: ?Sized, V, S> Index<&Q> for HashMap<K, V, S>
where
    K: Eq + Hash + Borrow<Q>,
    Q: Eq + Hash,
    S: BuildHasher,
{
    type Output = V;
    /// Returns a reference to the value corresponding to the supplied key.
    ///
    /// # Panics
    ///
    /// Panics if the key is not present in the `HashMap`.
    #[cfg_attr(feature = "inline-more", inline)]
    fn index(&self, key: &Q) -> &V {
        // Delegates to `get`, so indexing accepts any borrowed form of the key.
        self.get(key).expect("no entry found for key")
    }
}
/// An iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`iter`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`iter`]: struct.HashMap.html#method.iter
/// [`HashMap`]: struct.HashMap.html
pub struct Iter<'a, K, V> {
    // Raw cursor over the table's buckets; carries no lifetime of its own.
    inner: RawIter<(K, V)>,
    // Zero-sized borrow of the map for `'a`, which is what keeps the raw
    // iterator from outliving the table it points into.
    marker: PhantomData<(&'a K, &'a V)>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
impl<K, V> Clone for Iter<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn clone(&self) -> Self {
        // The clone resumes from the same position; the marker is zero-sized.
        Self {
            inner: self.inner.clone(),
            marker: PhantomData,
        }
    }
}
impl<K: Debug, V: Debug> fmt::Debug for Iter<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Formats the remaining entries without consuming the iterator by
        // walking a clone of it.
        f.debug_list().entries(self.clone()).finish()
    }
}
/// A mutable iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`iter_mut`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`iter_mut`]: struct.HashMap.html#method.iter_mut
/// [`HashMap`]: struct.HashMap.html
pub struct IterMut<'a, K, V> {
    // Raw cursor over the table's buckets; carries no lifetime of its own.
    inner: RawIter<(K, V)>,
    // To ensure invariance with respect to V
    marker: PhantomData<(&'a K, &'a mut V)>,
}
// We override the default Send impl which has K: Sync instead of K: Send. Both
// are correct, but this one is more general since it allows keys which
// implement Send but not Sync.
// SAFETY note: the iterator yields `&K` and `&mut V`; the bounds below are the
// ones required for those references to be sendable across threads.
unsafe impl<K: Send, V: Send> Send for IterMut<'_, K, V> {}
impl<K, V> IterMut<'_, K, V> {
    /// Returns an immutable iterator of references over the remaining items,
    /// without advancing `self`.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn iter(&self) -> Iter<'_, K, V> {
        Iter {
            inner: self.inner.clone(),
            marker: PhantomData,
        }
    }
}
/// An owning iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`into_iter`] method on [`HashMap`][`HashMap`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
/// [`into_iter`]: struct.HashMap.html#method.into_iter
/// [`HashMap`]: struct.HashMap.html
pub struct IntoIter<K, V> {
    // Owns the table's allocation and yields `(K, V)` pairs by value.
    inner: RawIntoIter<(K, V)>,
}
impl<K, V> IntoIter<K, V> {
    /// Returns an iterator of references over the remaining items, without
    /// advancing `self`.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn iter(&self) -> Iter<'_, K, V> {
        Iter {
            inner: self.inner.iter(),
            marker: PhantomData,
        }
    }
}
/// An iterator over the keys of a `HashMap`.
///
/// This `struct` is created by the [`keys`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`keys`]: struct.HashMap.html#method.keys
/// [`HashMap`]: struct.HashMap.html
pub struct Keys<'a, K, V> {
    // Thin projection over the full-entry iterator, yielding only `&K`.
    inner: Iter<'a, K, V>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
impl<K, V> Clone for Keys<'_, K, V> {
#[cfg_attr(feature = "inline-more", inline)]
fn clone(&self) -> Self {
Keys {
inner: self.inner.clone(),
}
}
}
impl<K: Debug, V> fmt::Debug for Keys<'_, K, V> {
    fmt: fn has no V: Debug bound — only keys are rendered.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.clone()).finish()
    }
}
/// An iterator over the values of a `HashMap`.
///
/// This `struct` is created by the [`values`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`values`]: struct.HashMap.html#method.values
/// [`HashMap`]: struct.HashMap.html
pub struct Values<'a, K, V> {
    // Thin projection over the full-entry iterator, yielding only `&V`.
    inner: Iter<'a, K, V>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
impl<K, V> Clone for Values<'_, K, V> {
#[cfg_attr(feature = "inline-more", inline)]
fn clone(&self) -> Self {
Values {
inner: self.inner.clone(),
}
}
}
impl<K, V: Debug> fmt::Debug for Values<'_, K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list().entries(self.clone()).finish()
}
}
/// A draining iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`drain`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`drain`]: struct.HashMap.html#method.drain
/// [`HashMap`]: struct.HashMap.html
pub struct Drain<'a, K, V> {
    inner: RawDrain<'a, (K, V)>,
}

impl<K, V> Drain<'_, K, V> {
    /// Returns a borrowing iterator over the entries not yet drained.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn iter(&self) -> Iter<'_, K, V> {
        let inner = self.inner.iter();
        Iter {
            inner,
            marker: PhantomData,
        }
    }
}
/// A mutable iterator over the values of a `HashMap`.
///
/// This `struct` is created by the [`values_mut`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`values_mut`]: struct.HashMap.html#method.values_mut
/// [`HashMap`]: struct.HashMap.html
pub struct ValuesMut<'a, K, V> {
    // Underlying entry iterator; `Iterator::next` below projects out the
    // `&mut V` half of each `(&K, &mut V)` pair.
    inner: IterMut<'a, K, V>,
}
/// A builder for computing where in a [`HashMap`] a key-value pair would be stored.
///
/// See the [`HashMap::raw_entry_mut`] docs for usage examples.
///
/// [`HashMap::raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
pub struct RawEntryBuilderMut<'a, K, V, S> {
    // Exclusive borrow of the whole map: both the raw table and the hash
    // builder are reachable from here, which the search/insert paths need.
    map: &'a mut HashMap<K, V, S>,
}
/// A view into a single entry in a map, which may either be vacant or occupied.
///
/// This is a lower-level version of [`Entry`].
///
/// This `enum` is constructed through the [`raw_entry_mut`] method on [`HashMap`],
/// then calling one of the methods of that [`RawEntryBuilderMut`].
///
/// [`HashMap`]: struct.HashMap.html
/// [`Entry`]: enum.Entry.html
/// [`raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
/// [`RawEntryMut`]: enum.RawEntryMut.html
/// [`Entry`]: enum.Entry.html
/// [`raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
/// [`RawEntryBuilderMut`]: struct.RawEntryBuilderMut.html
pub enum RawEntryMut<'a, K, V, S> {
    /// An occupied entry.
    // Note: the occupied arm does not carry `S` — the key is already stored,
    // so no hashing is needed to mutate or remove it.
    Occupied(RawOccupiedEntryMut<'a, K, V>),
    /// A vacant entry.
    // The vacant arm keeps `S` (via `RawVacantEntryMut::hash_builder`) so it
    // can hash the key when a value is eventually inserted.
    Vacant(RawVacantEntryMut<'a, K, V, S>),
}
/// A view into an occupied entry in a `HashMap`.
/// It is part of the [`RawEntryMut`] enum.
///
/// [`RawEntryMut`]: enum.RawEntryMut.html
pub struct RawOccupiedEntryMut<'a, K, V> {
    // Raw bucket pointing at the occupied (key, value) slot.
    elem: Bucket<(K, V)>,
    // Exclusive borrow of the table the bucket lives in; keeps the bucket
    // valid for 'a and permits removal.
    table: &'a mut RawTable<(K, V)>,
}
// SAFETY(review): the entry holds exclusive access to one (K, V) pair through
// `&'a mut RawTable`, so sending it across threads transfers at most exclusive
// access to a K and a V — `K: Send, V: Send` therefore suffice (analogous to
// `&mut T: Send where T: Send`). Assumption based on the visible fields.
unsafe impl<K, V> Send for RawOccupiedEntryMut<'_, K, V>
where
    K: Send,
    V: Send,
{
}
// SAFETY(review): sharing `&RawOccupiedEntryMut` only exposes shared access to
// the stored key and value, mirroring the rules a derived impl would use.
unsafe impl<K, V> Sync for RawOccupiedEntryMut<'_, K, V>
where
    K: Sync,
    V: Sync,
{
}
/// A view into a vacant entry in a `HashMap`.
/// It is part of the [`RawEntryMut`] enum.
///
/// [`RawEntryMut`]: enum.RawEntryMut.html
pub struct RawVacantEntryMut<'a, K, V, S> {
    // Exclusive borrow of the raw table to insert into.
    table: &'a mut RawTable<(K, V)>,
    // The map's hasher, used to hash the key on insertion (and passed to
    // `RawTable::insert` as a re-hash callback — presumably for table growth;
    // see `insert_with_hasher`).
    hash_builder: &'a S,
}
/// A builder for computing where in a [`HashMap`] a key-value pair would be stored.
///
/// See the [`HashMap::raw_entry`] docs for usage examples.
///
/// [`HashMap::raw_entry`]: struct.HashMap.html#method.raw_entry
pub struct RawEntryBuilder<'a, K, V, S> {
    // Shared borrow only: this builder supports read-only lookups, unlike
    // `RawEntryBuilderMut` which holds `&mut` and can insert.
    map: &'a HashMap<K, V, S>,
}
impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> {
    /// Creates a `RawEntryMut` from the given key.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key<Q: ?Sized>(self, k: &Q) -> RawEntryMut<'a, K, V, S>
    where
        S: BuildHasher,
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        // Hash the lookup key with the map's hasher, then defer to the
        // hash-supplied entry point.
        let hash = {
            let mut state = self.map.hash_builder.build_hasher();
            k.hash(&mut state);
            state.finish()
        };
        self.from_key_hashed_nocheck(hash, k)
    }

    /// Creates a `RawEntryMut` from the given key and its hash.
    #[inline]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S>
    where
        K: Borrow<Q>,
        Q: Eq,
    {
        // The caller already supplies the hash, so only equality is required.
        self.from_hash(hash, |key| key.borrow().eq(k))
    }
}
impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> {
    /// Creates a `RawEntryMut` from the given hash.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_hash<F>(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S>
    where
        // Higher-ranked bound: the predicate must accept a key borrowed for
        // any lifetime, since it is called against keys inside the table.
        for<'b> F: FnMut(&'b K) -> bool,
    {
        self.search(hash, is_match)
    }
    // Probe the raw table for an entry whose stored hash is `hash` and whose
    // key satisfies `is_match`; wrap the result (or its absence) in the
    // appropriate `RawEntryMut` arm.
    #[cfg_attr(feature = "inline-more", inline)]
    fn search<F>(self, hash: u64, mut is_match: F) -> RawEntryMut<'a, K, V, S>
    where
        for<'b> F: FnMut(&'b K) -> bool,
    {
        match self.map.table.find(hash, |(k, _)| is_match(k)) {
            Some(elem) => RawEntryMut::Occupied(RawOccupiedEntryMut {
                elem,
                // Hand the table borrow to the occupied view so it can
                // mutate or erase the found bucket.
                table: &mut self.map.table,
            }),
            None => RawEntryMut::Vacant(RawVacantEntryMut {
                table: &mut self.map.table,
                // The vacant view needs the hasher to insert later.
                hash_builder: &self.map.hash_builder,
            }),
        }
    }
}
impl<'a, K, V, S> RawEntryBuilder<'a, K, V, S> {
    /// Access an entry by key.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key<Q: ?Sized>(self, k: &Q) -> Option<(&'a K, &'a V)>
    where
        S: BuildHasher,
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        // Hash the lookup key with the map's hasher, then defer to the
        // hash-supplied entry point.
        let hash = {
            let mut state = self.map.hash_builder.build_hasher();
            k.hash(&mut state);
            state.finish()
        };
        self.from_key_hashed_nocheck(hash, k)
    }

    /// Access an entry by a key and its hash.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> Option<(&'a K, &'a V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        self.from_hash(hash, |key| key.borrow().eq(k))
    }

    // Shared probe used by the public lookup methods above and below.
    #[cfg_attr(feature = "inline-more", inline)]
    fn search<F>(self, hash: u64, mut is_match: F) -> Option<(&'a K, &'a V)>
    where
        F: FnMut(&K) -> bool,
    {
        let bucket = self.map.table.find(hash, |(k, _)| is_match(k))?;
        let (key, value) = unsafe { bucket.as_ref() };
        Some((key, value))
    }

    /// Access an entry by hash.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_hash<F>(self, hash: u64, is_match: F) -> Option<(&'a K, &'a V)>
    where
        F: FnMut(&K) -> bool,
    {
        self.search(hash, is_match)
    }
}
impl<'a, K, V, S> RawEntryMut<'a, K, V, S> {
    /// Sets the value of the entry, and returns a RawOccupiedEntryMut.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// let entry = map.raw_entry_mut().from_key("horseyland").insert("horseyland", 37);
    ///
    /// assert_eq!(entry.remove_entry(), ("horseyland", 37));
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V>
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            RawEntryMut::Vacant(entry) => entry.insert_entry(key, value),
            RawEntryMut::Occupied(mut entry) => {
                // Key already present: keep the stored key, replace the value.
                entry.insert(value);
                entry
            }
        }
    }

    /// Ensures a value is in the entry by inserting the default if empty, and returns
    /// mutable references to the key and value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 3);
    /// assert_eq!(map["poneyland"], 3);
    ///
    /// *map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 10).1 *= 2;
    /// assert_eq!(map["poneyland"], 6);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert(self, default_key: K, default_val: V) -> (&'a mut K, &'a mut V)
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            RawEntryMut::Vacant(entry) => entry.insert(default_key, default_val),
            RawEntryMut::Occupied(entry) => entry.into_key_value(),
        }
    }

    /// Ensures a value is in the entry by inserting the result of the default function if empty,
    /// and returns mutable references to the key and value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, String> = HashMap::new();
    ///
    /// map.raw_entry_mut().from_key("poneyland").or_insert_with(|| {
    ///     ("poneyland", "hoho".to_string())
    /// });
    ///
    /// assert_eq!(map["poneyland"], "hoho".to_string());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert_with<F>(self, default: F) -> (&'a mut K, &'a mut V)
    where
        F: FnOnce() -> (K, V),
        K: Hash,
        S: BuildHasher,
    {
        match self {
            RawEntryMut::Vacant(entry) => {
                // Only evaluate the default when the entry is actually vacant.
                let (key, value) = default();
                entry.insert(key, value)
            }
            RawEntryMut::Occupied(entry) => entry.into_key_value(),
        }
    }

    /// Provides in-place mutable access to an occupied entry before any
    /// potential inserts into the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// map.raw_entry_mut()
    ///    .from_key("poneyland")
    ///    .and_modify(|_k, v| { *v += 1 })
    ///    .or_insert("poneyland", 42);
    /// assert_eq!(map["poneyland"], 42);
    ///
    /// map.raw_entry_mut()
    ///    .from_key("poneyland")
    ///    .and_modify(|_k, v| { *v += 1 })
    ///    .or_insert("poneyland", 0);
    /// assert_eq!(map["poneyland"], 43);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn and_modify<F>(mut self, f: F) -> Self
    where
        F: FnOnce(&mut K, &mut V),
    {
        if let RawEntryMut::Occupied(entry) = &mut self {
            let (k, v) = entry.get_key_value_mut();
            f(k, v);
        }
        self
    }
}
impl<'a, K, V> RawOccupiedEntryMut<'a, K, V> {
    /// Gets a reference to the key in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K {
        let (key, _) = unsafe { self.elem.as_ref() };
        key
    }
    /// Gets a mutable reference to the key in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key_mut(&mut self) -> &mut K {
        let (key, _) = unsafe { self.elem.as_mut() };
        key
    }
    /// Converts the entry into a mutable reference to the key in the entry
    /// with a lifetime bound to the map itself.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_key(self) -> &'a mut K {
        let (key, _) = unsafe { self.elem.as_mut() };
        key
    }
    /// Gets a reference to the value in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get(&self) -> &V {
        let (_, value) = unsafe { self.elem.as_ref() };
        value
    }
    /// Converts the OccupiedEntry into a mutable reference to the value in the entry
    /// with a lifetime bound to the map itself.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_mut(self) -> &'a mut V {
        let (_, value) = unsafe { self.elem.as_mut() };
        value
    }
    /// Gets a mutable reference to the value in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_mut(&mut self) -> &mut V {
        let (_, value) = unsafe { self.elem.as_mut() };
        value
    }
    /// Gets a reference to the key and value in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_key_value(&mut self) -> (&K, &V) {
        let (key, value) = unsafe { self.elem.as_ref() };
        (key, value)
    }
    /// Gets a mutable reference to the key and value in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_key_value_mut(&mut self) -> (&mut K, &mut V) {
        let (key, value) = unsafe { self.elem.as_mut() };
        (key, value)
    }
    /// Converts the OccupiedEntry into a mutable reference to the key and value in the entry
    /// with a lifetime bound to the map itself.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_key_value(self) -> (&'a mut K, &'a mut V) {
        let (key, value) = unsafe { self.elem.as_mut() };
        (key, value)
    }
    /// Sets the value of the entry, and returns the entry's old value.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(&mut self, value: V) -> V {
        mem::replace(self.get_mut(), value)
    }
    /// Sets the key of the entry, and returns the entry's old key.
    //
    // NOTE: this doc line previously read "Sets the value of the entry, and
    // returns the entry's old value" — a copy-paste of `insert`'s doc; the
    // method replaces the *key* (see `key_mut`).
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert_key(&mut self, key: K) -> K {
        mem::replace(self.key_mut(), key)
    }
    /// Takes the value out of the entry, and returns it.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove(self) -> V {
        self.remove_entry().1
    }
    /// Take the ownership of the key and value from the map.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove_entry(self) -> (K, V) {
        unsafe {
            // Unlink the bucket without dropping its payload, then move the
            // (key, value) pair out of it.
            self.table.erase_no_drop(&self.elem);
            self.elem.read()
        }
    }
}
impl<'a, K, V, S> RawVacantEntryMut<'a, K, V, S> {
    /// Sets the value of the entry with the VacantEntry's key,
    /// and returns a mutable reference to it.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, key: K, value: V) -> (&'a mut K, &'a mut V)
    where
        K: Hash,
        S: BuildHasher,
    {
        // Compute the key's hash with the map's hasher, then insert.
        let hash = {
            let mut state = self.hash_builder.build_hasher();
            key.hash(&mut state);
            state.finish()
        };
        self.insert_hashed_nocheck(hash, key, value)
    }

    /// Sets the value of the entry with the VacantEntry's key,
    /// and returns a mutable reference to it.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::shadow_unrelated)]
    pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> (&'a mut K, &'a mut V)
    where
        K: Hash,
        S: BuildHasher,
    {
        // Copy the builder reference out so the closure does not borrow `self`.
        let builder = self.hash_builder;
        self.insert_with_hasher(hash, key, value, |k| make_hash(builder, k))
    }

    /// Set the value of an entry with a custom hasher function.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert_with_hasher<H>(
        self,
        hash: u64,
        key: K,
        value: V,
        hasher: H,
    ) -> (&'a mut K, &'a mut V)
    where
        H: Fn(&K) -> u64,
    {
        unsafe {
            let bucket = self.table.insert(hash, (key, value), |pair| hasher(&pair.0));
            let (k, v) = bucket.as_mut();
            (k, v)
        }
    }

    // Like `insert`, but returns an occupied view of the new entry instead of
    // references to its key and value.
    #[cfg_attr(feature = "inline-more", inline)]
    fn insert_entry(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V>
    where
        K: Hash,
        S: BuildHasher,
    {
        let hash_builder = self.hash_builder;
        let hash = {
            let mut state = hash_builder.build_hasher();
            key.hash(&mut state);
            state.finish()
        };
        let elem = self.table.insert(hash, (key, value), |pair| {
            make_hash(hash_builder, &pair.0)
        });
        RawOccupiedEntryMut {
            elem,
            table: self.table,
        }
    }
}
impl<K, V, S> Debug for RawEntryBuilderMut<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Field is an exclusive map borrow; show only the builder's name.
        let mut out = f.debug_struct("RawEntryBuilder");
        out.finish()
    }
}

impl<K: Debug, V: Debug, S> Debug for RawEntryMut<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut tuple = f.debug_tuple("RawEntry");
        match self {
            RawEntryMut::Vacant(v) => tuple.field(v),
            RawEntryMut::Occupied(o) => tuple.field(o),
        };
        tuple.finish()
    }
}

impl<K: Debug, V: Debug> Debug for RawOccupiedEntryMut<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut out = f.debug_struct("RawOccupiedEntryMut");
        out.field("key", self.key());
        out.field("value", self.get());
        out.finish()
    }
}

impl<K, V, S> Debug for RawVacantEntryMut<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut out = f.debug_struct("RawVacantEntryMut");
        out.finish()
    }
}

impl<K, V, S> Debug for RawEntryBuilder<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut out = f.debug_struct("RawEntryBuilder");
        out.finish()
    }
}
/// A view into a single entry in a map, which may either be vacant or occupied.
///
/// This `enum` is constructed from the [`entry`] method on [`HashMap`].
///
/// [`HashMap`]: struct.HashMap.html
/// [`entry`]: struct.HashMap.html#method.entry
pub enum Entry<'a, K, V, S> {
    /// An occupied entry.
    // Both arms carry `S` because each view borrows the whole
    // `HashMap<K, V, S>` (see `OccupiedEntry`/`VacantEntry` below).
    Occupied(OccupiedEntry<'a, K, V, S>),
    /// A vacant entry.
    Vacant(VacantEntry<'a, K, V, S>),
}
impl<K: Debug, V: Debug, S> Debug for Entry<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut tuple = f.debug_tuple("Entry");
        match self {
            Entry::Vacant(v) => tuple.field(v),
            Entry::Occupied(o) => tuple.field(o),
        };
        tuple.finish()
    }
}
/// A view into an occupied entry in a `HashMap`.
/// It is part of the [`Entry`] enum.
///
/// [`Entry`]: enum.Entry.html
pub struct OccupiedEntry<'a, K, V, S> {
    // The caller's key, kept around for `replace_entry`/`replace_key`, which
    // unwrap it. Entries built by `VacantEntry::insert_entry` store `None`
    // here (see that method) — presumably `HashMap::entry` fills it in;
    // confirm against the (out-of-view) constructor.
    key: Option<K>,
    // Raw bucket pointing at the occupied (key, value) slot.
    elem: Bucket<(K, V)>,
    // Exclusive borrow of the owning map; keeps `elem` valid for 'a.
    table: &'a mut HashMap<K, V, S>,
}
// SAFETY(review): the entry owns an optional K and exclusive access to the
// map (and thereby to K, V, S values) through `&'a mut HashMap`, so sending
// it across threads requires only `K: Send, V: Send, S: Send` — mirroring
// `&mut T: Send where T: Send`. Assumption based on the visible fields.
unsafe impl<K, V, S> Send for OccupiedEntry<'_, K, V, S>
where
    K: Send,
    V: Send,
    S: Send,
{
}
// SAFETY(review): sharing `&OccupiedEntry` exposes only shared access to the
// key, value, and hasher, so the derived-style `Sync` bounds apply.
unsafe impl<K, V, S> Sync for OccupiedEntry<'_, K, V, S>
where
    K: Sync,
    V: Sync,
    S: Sync,
{
}
impl<K: Debug, V: Debug, S> Debug for OccupiedEntry<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut out = f.debug_struct("OccupiedEntry");
        out.field("key", self.key());
        out.field("value", self.get());
        out.finish()
    }
}
/// A view into a vacant entry in a `HashMap`.
/// It is part of the [`Entry`] enum.
///
/// [`Entry`]: enum.Entry.html
pub struct VacantEntry<'a, K, V, S> {
    // Precomputed hash of `key`, reused on insertion.
    hash: u64,
    // The key the entry was created with; consumed on insert.
    key: K,
    // Exclusive borrow of the owning map.
    table: &'a mut HashMap<K, V, S>,
}

impl<K: Debug, V, S> Debug for VacantEntry<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut tuple = f.debug_tuple("VacantEntry");
        tuple.field(self.key());
        tuple.finish()
    }
}
impl<'a, K, V, S> IntoIterator for &'a HashMap<K, V, S> {
    type Item = (&'a K, &'a V);
    type IntoIter = Iter<'a, K, V>;
    /// Creates a borrowing iterator over the map's entries; equivalent to
    /// calling `iter()`.
    #[cfg_attr(feature = "inline-more", inline)]
    fn into_iter(self) -> Iter<'a, K, V> {
        self.iter()
    }
}
impl<'a, K, V, S> IntoIterator for &'a mut HashMap<K, V, S> {
    type Item = (&'a K, &'a mut V);
    type IntoIter = IterMut<'a, K, V>;
    /// Creates an iterator yielding shared keys and mutable values;
    /// equivalent to calling `iter_mut()`.
    #[cfg_attr(feature = "inline-more", inline)]
    fn into_iter(self) -> IterMut<'a, K, V> {
        self.iter_mut()
    }
}
impl<K, V, S> IntoIterator for HashMap<K, V, S> {
    type Item = (K, V);
    type IntoIter = IntoIter<K, V>;
    /// Creates a consuming iterator, that is, one that moves each key-value
    /// pair out of the map in arbitrary order. The map cannot be used after
    /// calling this.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// // Not possible with .iter()
    /// let vec: Vec<(&str, i32)> = map.into_iter().collect();
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    fn into_iter(self) -> IntoIter<K, V> {
        let inner = self.table.into_iter();
        IntoIter { inner }
    }
}
impl<'a, K, V> Iterator for Iter<'a, K, V> {
    type Item = (&'a K, &'a V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(&'a K, &'a V)> {
        match self.inner.next() {
            // Relies on the raw iterator only yielding buckets that stay
            // valid while the table is borrowed for 'a.
            Some(bucket) => {
                let entry = unsafe { bucket.as_ref() };
                Some((&entry.0, &entry.1))
            }
            None => None,
        }
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl<K, V> ExactSizeIterator for Iter<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}

impl<K, V> FusedIterator for Iter<'_, K, V> {}
impl<'a, K, V> Iterator for IterMut<'a, K, V> {
    type Item = (&'a K, &'a mut V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
        match self.inner.next() {
            // Relies on the raw iterator yielding each bucket at most once,
            // so handing out a `&mut V` per bucket does not alias.
            Some(bucket) => unsafe {
                let entry = bucket.as_mut();
                Some((&entry.0, &mut entry.1))
            },
            None => None,
        }
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl<K, V> ExactSizeIterator for IterMut<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}

impl<K, V> FusedIterator for IterMut<'_, K, V> {}

impl<K, V> fmt::Debug for IterMut<'_, K, V>
where
    K: fmt::Debug,
    V: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Debug via a fresh shared iterator so the cursor is not advanced.
        let mut list = f.debug_list();
        list.entries(self.iter());
        list.finish()
    }
}
impl<K, V> Iterator for IntoIter<K, V> {
    type Item = (K, V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(K, V)> {
        // Owned pairs come straight from the raw owning iterator.
        self.inner.next()
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl<K, V> ExactSizeIterator for IntoIter<K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}

impl<K, V> FusedIterator for IntoIter<K, V> {}

impl<K: Debug, V: Debug> fmt::Debug for IntoIter<K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Debug via a fresh shared iterator so no entries are consumed.
        let mut list = f.debug_list();
        list.entries(self.iter());
        list.finish()
    }
}
impl<'a, K, V> Iterator for Keys<'a, K, V> {
    type Item = &'a K;
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<&'a K> {
        // Project out the key half of the next entry.
        let (key, _) = self.inner.next()?;
        Some(key)
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl<K, V> ExactSizeIterator for Keys<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}

impl<K, V> FusedIterator for Keys<'_, K, V> {}
impl<'a, K, V> Iterator for Values<'a, K, V> {
    type Item = &'a V;
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<&'a V> {
        // Project out the value half of the next entry.
        let (_, value) = self.inner.next()?;
        Some(value)
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl<K, V> ExactSizeIterator for Values<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}

impl<K, V> FusedIterator for Values<'_, K, V> {}
impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
    type Item = &'a mut V;
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<&'a mut V> {
        // Project out the mutable value half of the next entry.
        let (_, value) = self.inner.next()?;
        Some(value)
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}

impl<K, V> FusedIterator for ValuesMut<'_, K, V> {}

impl<K, V> fmt::Debug for ValuesMut<'_, K, V>
where
    K: fmt::Debug,
    V: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Debug via a fresh shared iterator so the cursor is not advanced.
        let mut list = f.debug_list();
        list.entries(self.inner.iter());
        list.finish()
    }
}
impl<'a, K, V> Iterator for Drain<'a, K, V> {
    type Item = (K, V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(K, V)> {
        // Owned pairs come straight from the raw draining iterator.
        self.inner.next()
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl<K, V> ExactSizeIterator for Drain<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}

impl<K, V> FusedIterator for Drain<'_, K, V> {}

impl<K, V> fmt::Debug for Drain<'_, K, V>
where
    K: fmt::Debug,
    V: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Debug via a fresh shared iterator so nothing is drained early.
        let mut list = f.debug_list();
        list.entries(self.iter());
        list.finish()
    }
}
impl<'a, K, V, S> Entry<'a, K, V, S> {
    /// Sets the value of the entry, and returns an OccupiedEntry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// let entry = map.entry("horseyland").insert(37);
    ///
    /// assert_eq!(entry.key(), &"horseyland");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, value: V) -> OccupiedEntry<'a, K, V, S>
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            Entry::Vacant(entry) => entry.insert_entry(value),
            Entry::Occupied(mut entry) => {
                // Key already present: just replace the value.
                entry.insert(value);
                entry
            }
        }
    }

    /// Ensures a value is in the entry by inserting the default if empty, and returns
    /// a mutable reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// map.entry("poneyland").or_insert(3);
    /// assert_eq!(map["poneyland"], 3);
    ///
    /// *map.entry("poneyland").or_insert(10) *= 2;
    /// assert_eq!(map["poneyland"], 6);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert(self, default: V) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            Entry::Vacant(entry) => entry.insert(default),
            Entry::Occupied(entry) => entry.into_mut(),
        }
    }

    /// Ensures a value is in the entry by inserting the result of the default function if empty,
    /// and returns a mutable reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, String> = HashMap::new();
    /// let s = "hoho".to_string();
    ///
    /// map.entry("poneyland").or_insert_with(|| s);
    ///
    /// assert_eq!(map["poneyland"], "hoho".to_string());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            // Only evaluate the default when the entry is actually vacant.
            Entry::Vacant(entry) => entry.insert(default()),
            Entry::Occupied(entry) => entry.into_mut(),
        }
    }

    /// Returns a reference to this entry's key.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K {
        match self {
            Entry::Occupied(entry) => entry.key(),
            Entry::Vacant(entry) => entry.key(),
        }
    }

    /// Provides in-place mutable access to an occupied entry before any
    /// potential inserts into the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// map.entry("poneyland")
    ///    .and_modify(|e| { *e += 1 })
    ///    .or_insert(42);
    /// assert_eq!(map["poneyland"], 42);
    ///
    /// map.entry("poneyland")
    ///    .and_modify(|e| { *e += 1 })
    ///    .or_insert(42);
    /// assert_eq!(map["poneyland"], 43);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn and_modify<F>(mut self, f: F) -> Self
    where
        F: FnOnce(&mut V),
    {
        if let Entry::Occupied(entry) = &mut self {
            f(entry.get_mut());
        }
        self
    }
}
impl<'a, K, V: Default, S> Entry<'a, K, V, S> {
    /// Ensures a value is in the entry by inserting the default value if empty,
    /// and returns a mutable reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// # fn main() {
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, Option<u32>> = HashMap::new();
    /// map.entry("poneyland").or_default();
    ///
    /// assert_eq!(map["poneyland"], None);
    /// # }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_default(self) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            Entry::Vacant(entry) => entry.insert(V::default()),
            Entry::Occupied(entry) => entry.into_mut(),
        }
    }
}
impl<'a, K, V, S> OccupiedEntry<'a, K, V, S> {
    /// Gets a reference to the key in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K {
        unsafe { &self.elem.as_ref().0 }
    }
    /// Take the ownership of the key and value from the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     // We delete the entry from the map.
    ///     o.remove_entry();
    /// }
    ///
    /// assert_eq!(map.contains_key("poneyland"), false);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove_entry(self) -> (K, V) {
        unsafe {
            // Unlink the bucket without dropping its payload, then move the
            // (key, value) pair out of it.
            self.table.table.erase_no_drop(&self.elem);
            self.elem.read()
        }
    }
    /// Gets a reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     assert_eq!(o.get(), &12);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get(&self) -> &V {
        unsafe { &self.elem.as_ref().1 }
    }
    /// Gets a mutable reference to the value in the entry.
    ///
    /// If you need a reference to the `OccupiedEntry` which may outlive the
    /// destruction of the `Entry` value, see [`into_mut`].
    ///
    /// [`into_mut`]: #method.into_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// assert_eq!(map["poneyland"], 12);
    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
    ///     *o.get_mut() += 10;
    ///     assert_eq!(*o.get(), 22);
    ///
    ///     // We can use the same Entry multiple times.
    ///     *o.get_mut() += 2;
    /// }
    ///
    /// assert_eq!(map["poneyland"], 24);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_mut(&mut self) -> &mut V {
        unsafe { &mut self.elem.as_mut().1 }
    }
    /// Converts the OccupiedEntry into a mutable reference to the value in the entry
    /// with a lifetime bound to the map itself.
    ///
    /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
    ///
    /// [`get_mut`]: #method.get_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// assert_eq!(map["poneyland"], 12);
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     *o.into_mut() += 10;
    /// }
    ///
    /// assert_eq!(map["poneyland"], 22);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_mut(self) -> &'a mut V {
        unsafe { &mut self.elem.as_mut().1 }
    }
    /// Sets the value of the entry, and returns the entry's old value.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
    ///     assert_eq!(o.insert(15), 12);
    /// }
    ///
    /// assert_eq!(map["poneyland"], 15);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(&mut self, value: V) -> V {
        // Single-call form, consistent with `RawOccupiedEntryMut::insert`
        // (previously a three-line `mem::swap` dance on a `mut value`).
        mem::replace(self.get_mut(), value)
    }
    /// Takes the value out of the entry, and returns it.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     assert_eq!(o.remove(), 12);
    /// }
    ///
    /// assert_eq!(map.contains_key("poneyland"), false);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove(self) -> V {
        self.remove_entry().1
    }
    /// Replaces the entry, returning the old key and value. The new key in the hash map will be
    /// the key used to create this entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::hash_map::{Entry, HashMap};
    /// use std::rc::Rc;
    ///
    /// let mut map: HashMap<Rc<String>, u32> = HashMap::new();
    /// map.insert(Rc::new("Stringthing".to_string()), 15);
    ///
    /// let my_key = Rc::new("Stringthing".to_string());
    ///
    /// if let Entry::Occupied(entry) = map.entry(my_key) {
    ///     // Also replace the key with a handle to our other key.
    ///     let (old_key, old_value): (Rc<String>, u32) = entry.replace_entry(16);
    /// }
    ///
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn replace_entry(self, value: V) -> (K, V) {
        let entry = unsafe { self.elem.as_mut() };
        // NOTE(review): `self.key` is unwrapped here; entries produced by
        // `VacantEntry::insert_entry` store `None`, so this relies on
        // `replace_entry` only being reachable for entries created with a
        // caller-supplied key (e.g. via `HashMap::entry`) — confirm against
        // the out-of-view constructor.
        let old_key = mem::replace(&mut entry.0, self.key.unwrap());
        let old_value = mem::replace(&mut entry.1, value);
        (old_key, old_value)
    }
    /// Replaces the key in the hash map with the key used to create this entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::hash_map::{Entry, HashMap};
    /// use std::rc::Rc;
    ///
    /// let mut map: HashMap<Rc<String>, u32> = HashMap::new();
    /// let mut known_strings: Vec<Rc<String>> = Vec::new();
    ///
    /// // Initialise known strings, run program, etc.
    ///
    /// reclaim_memory(&mut map, &known_strings);
    ///
    /// fn reclaim_memory(map: &mut HashMap<Rc<String>, u32>, known_strings: &[Rc<String>] ) {
    ///     for s in known_strings {
    ///         if let Entry::Occupied(entry) = map.entry(s.clone()) {
    ///             // Replaces the entry's key with our version of it in `known_strings`.
    ///             entry.replace_key();
    ///         }
    ///     }
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn replace_key(self) -> K {
        let entry = unsafe { self.elem.as_mut() };
        // NOTE(review): same `self.key.unwrap()` invariant as `replace_entry`.
        mem::replace(&mut entry.0, self.key.unwrap())
    }
}
// A view into a vacant slot: holds the owned probe key and the precomputed
// hash so insertion does not need to re-hash or re-probe.
impl<'a, K, V, S> VacantEntry<'a, K, V, S> {
/// Gets a reference to the key that would be used when inserting a value
/// through the `VacantEntry`.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
/// assert_eq!(map.entry("poneyland").key(), &"poneyland");
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn key(&self) -> &K {
&self.key
}
/// Take ownership of the key.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::Entry;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
///
/// if let Entry::Vacant(v) = map.entry("poneyland") {
/// v.into_key();
/// }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn into_key(self) -> K {
self.key
}
/// Sets the value of the entry with the VacantEntry's key,
/// and returns a mutable reference to it.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::Entry;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
///
/// if let Entry::Vacant(o) = map.entry("poneyland") {
/// o.insert(37);
/// }
/// assert_eq!(map["poneyland"], 37);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn insert(self, value: V) -> &'a mut V
where
K: Hash,
S: BuildHasher,
{
let hash_builder = &self.table.hash_builder;
// `self.hash` was computed when the entry was created; the closure
// re-hashes stored keys if the raw table needs to resize during insert.
let bucket = self.table.table.insert(self.hash, (self.key, value), |x| {
make_hash(hash_builder, &x.0)
});
// SAFETY relies on the bucket returned by `insert` being valid for the
// lifetime of the `&'a mut` borrow of the table.
unsafe { &mut bucket.as_mut().1 }
}
// Like `insert`, but returns an `OccupiedEntry` viewing the new slot
// instead of a value reference.
#[cfg_attr(feature = "inline-more", inline)]
fn insert_entry(self, value: V) -> OccupiedEntry<'a, K, V, S>
where
K: Hash,
S: BuildHasher,
{
let hash_builder = &self.table.hash_builder;
let elem = self.table.table.insert(self.hash, (self.key, value), |x| {
make_hash(hash_builder, &x.0)
});
OccupiedEntry {
// `key: None` — the key has been moved into the table, so the
// occupied entry has no separate probe key to swap in.
key: None,
elem,
table: self.table,
}
}
}
impl<K, V, S> FromIterator<(K, V)> for HashMap<K, V, S>
where
    K: Eq + Hash,
    S: BuildHasher + Default,
{
    /// Builds a map from `(key, value)` pairs, pre-sizing the table from the
    /// iterator's lower size-hint bound. Later duplicates overwrite earlier
    /// ones, as with repeated `insert`.
    #[cfg_attr(feature = "inline-more", inline)]
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        let pairs = iter.into_iter();
        let (lower_bound, _) = pairs.size_hint();
        let mut map = Self::with_capacity_and_hasher(lower_bound, S::default());
        for (key, value) in pairs {
            map.insert(key, value);
        }
        map
    }
}
impl<K, V, S> Extend<(K, V)> for HashMap<K, V, S>
where
    K: Eq + Hash,
    S: BuildHasher,
{
    /// Inserts every pair produced by `iter`, overwriting existing keys.
    ///
    /// Capacity strategy: an empty map reserves the iterator's full lower
    /// size-hint bound, while a non-empty map reserves only half of it
    /// (rounded up) — keys may already be present or appear multiple times,
    /// and this still bounds the number of resizes at two in the worst case.
    #[cfg_attr(feature = "inline-more", inline)]
    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
        let pairs = iter.into_iter();
        let hint = pairs.size_hint().0;
        let additional = if self.is_empty() { hint } else { (hint + 1) / 2 };
        self.reserve(additional);
        for (key, value) in pairs {
            self.insert(key, value);
        }
    }
}
impl<'a, K, V, S> Extend<(&'a K, &'a V)> for HashMap<K, V, S>
where
    K: Eq + Hash + Copy,
    V: Copy,
    S: BuildHasher,
{
    /// Copies each borrowed pair (both `K` and `V` are `Copy`) and defers to
    /// the owning `Extend<(K, V)>` implementation above.
    #[cfg_attr(feature = "inline-more", inline)]
    fn extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T) {
        let owned = iter.into_iter().map(|(k, v)| (*k, *v));
        self.extend(owned);
    }
}
// Compile-time-only witnesses that the map's iterator and entry types are
// covariant in their key/value lifetimes: each function type-checks only if
// `'static` data coerces to a shorter lifetime. Never called at runtime.
#[allow(dead_code)]
fn assert_covariance() {
fn map_key<'new>(v: HashMap<&'static str, u8>) -> HashMap<&'new str, u8> {
v
}
fn map_val<'new>(v: HashMap<u8, &'static str>) -> HashMap<u8, &'new str> {
v
}
fn iter_key<'a, 'new>(v: Iter<'a, &'static str, u8>) -> Iter<'a, &'new str, u8> {
v
}
fn iter_val<'a, 'new>(v: Iter<'a, u8, &'static str>) -> Iter<'a, u8, &'new str> {
v
}
fn into_iter_key<'new>(v: IntoIter<&'static str, u8>) -> IntoIter<&'new str, u8> {
v
}
fn into_iter_val<'new>(v: IntoIter<u8, &'static str>) -> IntoIter<u8, &'new str> {
v
}
fn keys_key<'a, 'new>(v: Keys<'a, &'static str, u8>) -> Keys<'a, &'new str, u8> {
v
}
fn keys_val<'a, 'new>(v: Keys<'a, u8, &'static str>) -> Keys<'a, u8, &'new str> {
v
}
fn values_key<'a, 'new>(v: Values<'a, &'static str, u8>) -> Values<'a, &'new str, u8> {
v
}
fn values_val<'a, 'new>(v: Values<'a, u8, &'static str>) -> Values<'a, u8, &'new str> {
v
}
// Drain is also covariant in its borrow lifetime.
fn drain<'new>(
d: Drain<'static, &'static str, &'static str>,
) -> Drain<'new, &'new str, &'new str> {
d
}
}
// Unit tests exercising the HashMap implementation above. Code is kept
// byte-identical; comments below are review annotations only.
#[cfg(test)]
mod test_map {
use super::DefaultHashBuilder;
use super::Entry::{Occupied, Vacant};
use super::{HashMap, RawEntryMut};
#[cfg(not(miri))]
use crate::CollectionAllocErr::*;
use rand::{rngs::SmallRng, Rng, SeedableRng};
use std::cell::RefCell;
use std::usize;
use std::vec::Vec;
// Every empty-construction path must report zero capacity (no allocation).
#[test]
fn test_zero_capacities() {
type HM = HashMap<i32, i32>;
let m = HM::new();
assert_eq!(m.capacity(), 0);
let m = HM::default();
assert_eq!(m.capacity(), 0);
let m = HM::with_hasher(DefaultHashBuilder::default());
assert_eq!(m.capacity(), 0);
let m = HM::with_capacity(0);
assert_eq!(m.capacity(), 0);
let m = HM::with_capacity_and_hasher(0, DefaultHashBuilder::default());
assert_eq!(m.capacity(), 0);
let mut m = HM::new();
m.insert(1, 1);
m.insert(2, 2);
m.remove(&1);
m.remove(&2);
m.shrink_to_fit();
assert_eq!(m.capacity(), 0);
let mut m = HM::new();
m.reserve(0);
assert_eq!(m.capacity(), 0);
}
#[test]
fn test_create_capacity_zero() {
let mut m = HashMap::with_capacity(0);
assert!(m.insert(1, 1).is_none());
assert!(m.contains_key(&1));
assert!(!m.contains_key(&0));
}
#[test]
fn test_insert() {
let mut m = HashMap::new();
assert_eq!(m.len(), 0);
assert!(m.insert(1, 2).is_none());
assert_eq!(m.len(), 1);
assert!(m.insert(2, 4).is_none());
assert_eq!(m.len(), 2);
assert_eq!(*m.get(&1).unwrap(), 2);
assert_eq!(*m.get(&2).unwrap(), 4);
}
#[test]
fn test_clone() {
let mut m = HashMap::new();
assert_eq!(m.len(), 0);
assert!(m.insert(1, 2).is_none());
assert_eq!(m.len(), 1);
assert!(m.insert(2, 4).is_none());
assert_eq!(m.len(), 2);
let m2 = m.clone();
assert_eq!(*m2.get(&1).unwrap(), 2);
assert_eq!(*m2.get(&2).unwrap(), 4);
assert_eq!(m2.len(), 2);
}
// DROP_VECTOR tracks live instances per slot: Droppable::new/clone bumps
// the slot's counter, Drop decrements it, so tests can assert exactly
// which instances are alive at each point.
thread_local! { static DROP_VECTOR: RefCell<Vec<i32>> = RefCell::new(Vec::new()) }
#[derive(Hash, PartialEq, Eq)]
struct Droppable {
k: usize,
}
impl Droppable {
fn new(k: usize) -> Droppable {
DROP_VECTOR.with(|slot| {
slot.borrow_mut()[k] += 1;
});
Droppable { k }
}
}
impl Drop for Droppable {
fn drop(&mut self) {
DROP_VECTOR.with(|slot| {
slot.borrow_mut()[self.k] -= 1;
});
}
}
impl Clone for Droppable {
fn clone(&self) -> Self {
Droppable::new(self.k)
}
}
// Keys use slots 0..100, values slots 100..200; verifies remove() drops
// both halves of each pair exactly once.
#[test]
fn test_drops() {
DROP_VECTOR.with(|slot| {
*slot.borrow_mut() = vec![0; 200];
});
{
let mut m = HashMap::new();
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
for i in 0..100 {
let d1 = Droppable::new(i);
let d2 = Droppable::new(i + 100);
m.insert(d1, d2);
}
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 1);
}
});
for i in 0..50 {
let k = Droppable::new(i);
let v = m.remove(&k);
assert!(v.is_some());
DROP_VECTOR.with(|v| {
assert_eq!(v.borrow()[i], 1);
assert_eq!(v.borrow()[i + 100], 1);
});
}
DROP_VECTOR.with(|v| {
for i in 0..50 {
assert_eq!(v.borrow()[i], 0);
assert_eq!(v.borrow()[i + 100], 0);
}
for i in 50..100 {
assert_eq!(v.borrow()[i], 1);
assert_eq!(v.borrow()[i + 100], 1);
}
});
}
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
}
// Verifies that a partially consumed IntoIter drops the remaining pairs
// when the iterator itself is dropped.
#[test]
fn test_into_iter_drops() {
DROP_VECTOR.with(|v| {
*v.borrow_mut() = vec![0; 200];
});
let hm = {
let mut hm = HashMap::new();
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
for i in 0..100 {
let d1 = Droppable::new(i);
let d2 = Droppable::new(i + 100);
hm.insert(d1, d2);
}
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 1);
}
});
hm
};
// By the way, ensure that cloning doesn't screw up the dropping.
drop(hm.clone());
{
let mut half = hm.into_iter().take(50);
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 1);
}
});
for _ in half.by_ref() {}
DROP_VECTOR.with(|v| {
let nk = (0..100).filter(|&i| v.borrow()[i] == 1).count();
let nv = (0..100).filter(|&i| v.borrow()[i + 100] == 1).count();
assert_eq!(nk, 50);
assert_eq!(nv, 50);
});
};
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
}
#[test]
fn test_empty_remove() {
let mut m: HashMap<i32, bool> = HashMap::new();
assert_eq!(m.remove(&0), None);
}
#[test]
fn test_empty_entry() {
let mut m: HashMap<i32, bool> = HashMap::new();
match m.entry(0) {
Occupied(_) => panic!(),
Vacant(_) => {}
}
assert!(*m.entry(0).or_insert(true));
assert_eq!(m.len(), 1);
}
#[test]
fn test_empty_iter() {
let mut m: HashMap<i32, bool> = HashMap::new();
assert_eq!(m.drain().next(), None);
assert_eq!(m.keys().next(), None);
assert_eq!(m.values().next(), None);
assert_eq!(m.values_mut().next(), None);
assert_eq!(m.iter().next(), None);
assert_eq!(m.iter_mut().next(), None);
assert_eq!(m.len(), 0);
assert!(m.is_empty());
assert_eq!(m.into_iter().next(), None);
}
// Stress test: after every insert/remove, every key that should be
// present is, and every key that shouldn't isn't. O(n^2), hence the
// miri exclusion.
#[test]
#[cfg(not(miri))] // FIXME: takes too long
fn test_lots_of_insertions() {
let mut m = HashMap::new();
// Try this a few times to make sure we never screw up the hashmap's
// internal state.
for _ in 0..10 {
assert!(m.is_empty());
for i in 1..1001 {
assert!(m.insert(i, i).is_none());
for j in 1..=i {
let r = m.get(&j);
assert_eq!(r, Some(&j));
}
for j in i + 1..1001 {
let r = m.get(&j);
assert_eq!(r, None);
}
}
for i in 1001..2001 {
assert!(!m.contains_key(&i));
}
// remove forwards
for i in 1..1001 {
assert!(m.remove(&i).is_some());
for j in 1..=i {
assert!(!m.contains_key(&j));
}
for j in i + 1..1001 {
assert!(m.contains_key(&j));
}
}
for i in 1..1001 {
assert!(!m.contains_key(&i));
}
for i in 1..1001 {
assert!(m.insert(i, i).is_none());
}
// remove backwards
for i in (1..1001).rev() {
assert!(m.remove(&i).is_some());
for j in i..1001 {
assert!(!m.contains_key(&j));
}
for j in 1..i {
assert!(m.contains_key(&j));
}
}
}
}
#[test]
fn test_find_mut() {
let mut m = HashMap::new();
assert!(m.insert(1, 12).is_none());
assert!(m.insert(2, 8).is_none());
assert!(m.insert(5, 14).is_none());
let new = 100;
match m.get_mut(&5) {
None => panic!(),
Some(x) => *x = new,
}
assert_eq!(m.get(&5), Some(&new));
}
#[test]
fn test_insert_overwrite() {
let mut m = HashMap::new();
assert!(m.insert(1, 2).is_none());
assert_eq!(*m.get(&1).unwrap(), 2);
// NOTE(review): `!...is_none()` would read more naturally as `is_some()`.
assert!(!m.insert(1, 3).is_none());
assert_eq!(*m.get(&1).unwrap(), 3);
}
#[test]
fn test_insert_conflicts() {
let mut m = HashMap::with_capacity(4);
assert!(m.insert(1, 2).is_none());
assert!(m.insert(5, 3).is_none());
assert!(m.insert(9, 4).is_none());
assert_eq!(*m.get(&9).unwrap(), 4);
assert_eq!(*m.get(&5).unwrap(), 3);
assert_eq!(*m.get(&1).unwrap(), 2);
}
#[test]
fn test_conflict_remove() {
let mut m = HashMap::with_capacity(4);
assert!(m.insert(1, 2).is_none());
assert_eq!(*m.get(&1).unwrap(), 2);
assert!(m.insert(5, 3).is_none());
assert_eq!(*m.get(&1).unwrap(), 2);
assert_eq!(*m.get(&5).unwrap(), 3);
assert!(m.insert(9, 4).is_none());
assert_eq!(*m.get(&1).unwrap(), 2);
assert_eq!(*m.get(&5).unwrap(), 3);
assert_eq!(*m.get(&9).unwrap(), 4);
assert!(m.remove(&1).is_some());
assert_eq!(*m.get(&9).unwrap(), 4);
assert_eq!(*m.get(&5).unwrap(), 3);
}
#[test]
fn test_is_empty() {
let mut m = HashMap::with_capacity(4);
assert!(m.insert(1, 2).is_none());
assert!(!m.is_empty());
assert!(m.remove(&1).is_some());
assert!(m.is_empty());
}
#[test]
fn test_remove() {
let mut m = HashMap::new();
m.insert(1, 2);
assert_eq!(m.remove(&1), Some(2));
assert_eq!(m.remove(&1), None);
}
#[test]
fn test_remove_entry() {
let mut m = HashMap::new();
m.insert(1, 2);
assert_eq!(m.remove_entry(&1), Some((1, 2)));
assert_eq!(m.remove(&1), None);
}
// Uses a bitmask over keys 0..32 to confirm each pair is yielded exactly
// once regardless of iteration order.
#[test]
fn test_iterate() {
let mut m = HashMap::with_capacity(4);
for i in 0..32 {
assert!(m.insert(i, i * 2).is_none());
}
assert_eq!(m.len(), 32);
let mut observed: u32 = 0;
for (k, v) in &m {
assert_eq!(*v, *k * 2);
observed |= 1 << *k;
}
assert_eq!(observed, 0xFFFF_FFFF);
}
#[test]
fn test_keys() {
let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
let map: HashMap<_, _> = vec.into_iter().collect();
let keys: Vec<_> = map.keys().cloned().collect();
assert_eq!(keys.len(), 3);
assert!(keys.contains(&1));
assert!(keys.contains(&2));
assert!(keys.contains(&3));
}
#[test]
fn test_values() {
let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
let map: HashMap<_, _> = vec.into_iter().collect();
let values: Vec<_> = map.values().cloned().collect();
assert_eq!(values.len(), 3);
assert!(values.contains(&'a'));
assert!(values.contains(&'b'));
assert!(values.contains(&'c'));
}
#[test]
fn test_values_mut() {
let vec = vec![(1, 1), (2, 2), (3, 3)];
let mut map: HashMap<_, _> = vec.into_iter().collect();
for value in map.values_mut() {
*value = (*value) * 2
}
let values: Vec<_> = map.values().cloned().collect();
assert_eq!(values.len(), 3);
assert!(values.contains(&2));
assert!(values.contains(&4));
assert!(values.contains(&6));
}
#[test]
fn test_find() {
let mut m = HashMap::new();
assert!(m.get(&1).is_none());
m.insert(1, 2);
match m.get(&1) {
None => panic!(),
Some(v) => assert_eq!(*v, 2),
}
}
#[test]
fn test_eq() {
let mut m1 = HashMap::new();
m1.insert(1, 2);
m1.insert(2, 3);
m1.insert(3, 4);
let mut m2 = HashMap::new();
m2.insert(1, 2);
m2.insert(2, 3);
assert!(m1 != m2);
m2.insert(3, 4);
assert_eq!(m1, m2);
}
// Debug output order is unspecified, so both orderings are accepted.
#[test]
fn test_show() {
let mut map = HashMap::new();
let empty: HashMap<i32, i32> = HashMap::new();
map.insert(1, 2);
map.insert(3, 4);
let map_str = format!("{:?}", map);
assert!(map_str == "{1: 2, 3: 4}" || map_str == "{3: 4, 1: 2}");
assert_eq!(format!("{:?}", empty), "{}");
}
// Inserts until the raw capacity changes, i.e. until a resize occurs.
#[test]
fn test_expand() {
let mut m = HashMap::new();
assert_eq!(m.len(), 0);
assert!(m.is_empty());
let mut i = 0;
let old_raw_cap = m.raw_capacity();
while old_raw_cap == m.raw_capacity() {
m.insert(i, i);
i += 1;
}
assert_eq!(m.len(), i);
assert!(!m.is_empty());
}
// Pins the growth/shrink thresholds of the resize policy via raw_capacity.
#[test]
fn test_behavior_resize_policy() {
let mut m = HashMap::new();
assert_eq!(m.len(), 0);
assert_eq!(m.raw_capacity(), 1);
assert!(m.is_empty());
m.insert(0, 0);
m.remove(&0);
assert!(m.is_empty());
let initial_raw_cap = m.raw_capacity();
m.reserve(initial_raw_cap);
let raw_cap = m.raw_capacity();
assert_eq!(raw_cap, initial_raw_cap * 2);
let mut i = 0;
for _ in 0..raw_cap * 3 / 4 {
m.insert(i, i);
i += 1;
}
// three quarters full
assert_eq!(m.len(), i);
assert_eq!(m.raw_capacity(), raw_cap);
for _ in 0..raw_cap / 4 {
m.insert(i, i);
i += 1;
}
// half full
let new_raw_cap = m.raw_capacity();
assert_eq!(new_raw_cap, raw_cap * 2);
for _ in 0..raw_cap / 2 - 1 {
i -= 1;
m.remove(&i);
assert_eq!(m.raw_capacity(), new_raw_cap);
}
// A little more than one quarter full.
m.shrink_to_fit();
assert_eq!(m.raw_capacity(), raw_cap);
// again, a little more than half full
for _ in 0..raw_cap / 2 {
i -= 1;
m.remove(&i);
}
m.shrink_to_fit();
assert_eq!(m.len(), i);
assert!(!m.is_empty());
assert_eq!(m.raw_capacity(), initial_raw_cap);
}
// reserve() must make room for the promised number of inserts without
// further reallocation; shrink_to_fit must give excess capacity back.
#[test]
fn test_reserve_shrink_to_fit() {
let mut m = HashMap::new();
m.insert(0, 0);
m.remove(&0);
assert!(m.capacity() >= m.len());
for i in 0..128 {
m.insert(i, i);
}
m.reserve(256);
let usable_cap = m.capacity();
for i in 128..(128 + 256) {
m.insert(i, i);
assert_eq!(m.capacity(), usable_cap);
}
for i in 100..(128 + 256) {
assert_eq!(m.remove(&i), Some(i));
}
m.shrink_to_fit();
assert_eq!(m.len(), 100);
assert!(!m.is_empty());
assert!(m.capacity() >= m.len());
for i in 0..100 {
assert_eq!(m.remove(&i), Some(i));
}
m.shrink_to_fit();
m.insert(0, 0);
assert_eq!(m.len(), 1);
assert!(m.capacity() >= m.len());
assert_eq!(m.remove(&0), Some(0));
}
#[test]
fn test_from_iter() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let map: HashMap<_, _> = xs.iter().cloned().collect();
for &(k, v) in &xs {
assert_eq!(map.get(&k), Some(&v));
}
}
#[test]
fn test_size_hint() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let map: HashMap<_, _> = xs.iter().cloned().collect();
let mut iter = map.iter();
for _ in iter.by_ref().take(3) {}
assert_eq!(iter.size_hint(), (3, Some(3)));
}
#[test]
fn test_iter_len() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let map: HashMap<_, _> = xs.iter().cloned().collect();
let mut iter = map.iter();
for _ in iter.by_ref().take(3) {}
assert_eq!(iter.len(), 3);
}
#[test]
fn test_mut_size_hint() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let mut map: HashMap<_, _> = xs.iter().cloned().collect();
let mut iter = map.iter_mut();
for _ in iter.by_ref().take(3) {}
assert_eq!(iter.size_hint(), (3, Some(3)));
}
#[test]
fn test_iter_mut_len() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let mut map: HashMap<_, _> = xs.iter().cloned().collect();
let mut iter = map.iter_mut();
for _ in iter.by_ref().take(3) {}
assert_eq!(iter.len(), 3);
}
#[test]
fn test_index() {
let mut map = HashMap::new();
map.insert(1, 2);
map.insert(2, 1);
map.insert(3, 4);
assert_eq!(map[&2], 1);
}
#[test]
#[should_panic]
fn test_index_nonexistent() {
let mut map = HashMap::new();
map.insert(1, 2);
map.insert(2, 1);
map.insert(3, 4);
map[&4];
}
#[test]
fn test_entry() {
let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
let mut map: HashMap<_, _> = xs.iter().cloned().collect();
// Existing key (insert)
match map.entry(1) {
Vacant(_) => unreachable!(),
Occupied(mut view) => {
assert_eq!(view.get(), &10);
assert_eq!(view.insert(100), 10);
}
}
assert_eq!(map.get(&1).unwrap(), &100);
assert_eq!(map.len(), 6);
// Existing key (update)
match map.entry(2) {
Vacant(_) => unreachable!(),
Occupied(mut view) => {
let v = view.get_mut();
let new_v = (*v) * 10;
*v = new_v;
}
}
assert_eq!(map.get(&2).unwrap(), &200);
assert_eq!(map.len(), 6);
// Existing key (take)
match map.entry(3) {
Vacant(_) => unreachable!(),
Occupied(view) => {
assert_eq!(view.remove(), 30);
}
}
assert_eq!(map.get(&3), None);
assert_eq!(map.len(), 5);
// Inexistent key (insert)
match map.entry(10) {
Occupied(_) => unreachable!(),
Vacant(view) => {
assert_eq!(*view.insert(1000), 1000);
}
}
assert_eq!(map.get(&10).unwrap(), &1000);
assert_eq!(map.len(), 6);
}
#[test]
fn test_entry_take_doesnt_corrupt() {
#![allow(deprecated)] //rand
// Test for #19292
fn check(m: &HashMap<i32, ()>) {
for k in m.keys() {
assert!(m.contains_key(k), "{} is in keys() but not in the map?", k);
}
}
let mut m = HashMap::new();
// Fixed seed for reproducibility.
let mut rng = {
let seed = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
SmallRng::from_seed(seed)
};
// Populate the map with some items.
for _ in 0..50 {
let x = rng.gen_range(-10, 10);
m.insert(x, ());
}
for _ in 0..1000 {
let x = rng.gen_range(-10, 10);
match m.entry(x) {
Vacant(_) => {}
Occupied(e) => {
e.remove();
}
}
check(&m);
}
}
#[test]
fn test_extend_ref() {
let mut a = HashMap::new();
a.insert(1, "one");
let mut b = HashMap::new();
b.insert(2, "two");
b.insert(3, "three");
a.extend(&b);
assert_eq!(a.len(), 3);
assert_eq!(a[&1], "one");
assert_eq!(a[&2], "two");
assert_eq!(a[&3], "three");
}
// Filling exactly to capacity must not resize; one more insert must.
#[test]
fn test_capacity_not_less_than_len() {
let mut a = HashMap::new();
let mut item = 0;
for _ in 0..116 {
a.insert(item, 0);
item += 1;
}
assert!(a.capacity() > a.len());
let free = a.capacity() - a.len();
for _ in 0..free {
a.insert(item, 0);
item += 1;
}
assert_eq!(a.len(), a.capacity());
// Insert at capacity should cause allocation.
a.insert(item, 0);
assert!(a.capacity() > a.len());
}
#[test]
fn test_occupied_entry_key() {
let mut a = HashMap::new();
let key = "hello there";
let value = "value goes here";
assert!(a.is_empty());
a.insert(key.clone(), value.clone());
assert_eq!(a.len(), 1);
assert_eq!(a[key], value);
match a.entry(key.clone()) {
Vacant(_) => panic!(),
Occupied(e) => assert_eq!(key, *e.key()),
}
assert_eq!(a.len(), 1);
assert_eq!(a[key], value);
}
#[test]
fn test_vacant_entry_key() {
let mut a = HashMap::new();
let key = "hello there";
let value = "value goes here";
assert!(a.is_empty());
match a.entry(key.clone()) {
Occupied(_) => panic!(),
Vacant(e) => {
assert_eq!(key, *e.key());
e.insert(value.clone());
}
}
assert_eq!(a.len(), 1);
assert_eq!(a[key], value);
}
#[test]
fn test_retain() {
let mut map: HashMap<i32, i32> = (0..100).map(|x| (x, x * 10)).collect();
map.retain(|&k, _| k % 2 == 0);
assert_eq!(map.len(), 50);
assert_eq!(map[&2], 20);
assert_eq!(map[&4], 40);
assert_eq!(map[&6], 60);
}
// try_reserve must fail gracefully: capacity overflow for usize::MAX,
// allocator error for a huge-but-representable request.
#[test]
#[cfg(not(miri))] // FIXME: no OOM signalling (https://github.com/rust-lang/miri/issues/613)
fn test_try_reserve() {
let mut empty_bytes: HashMap<u8, u8> = HashMap::new();
const MAX_USIZE: usize = usize::MAX;
if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_USIZE) {
} else {
panic!("usize::MAX should trigger an overflow!");
}
if let Err(AllocErr { .. }) = empty_bytes.try_reserve(MAX_USIZE / 8) {
} else {
// This may succeed if there is enough free memory. Attempt to
// allocate a second hashmap to ensure the allocation will fail.
let mut empty_bytes2: HashMap<u8, u8> = HashMap::new();
if let Err(AllocErr { .. }) = empty_bytes2.try_reserve(MAX_USIZE / 8) {
} else {
panic!("usize::MAX / 8 should trigger an OOM!");
}
}
}
// Exercises the raw-entry API: by key, by precomputed hash, and by
// hash+key, checking all lookup paths agree with plain get().
#[test]
fn test_raw_entry() {
use super::RawEntryMut::{Occupied, Vacant};
let xs = [(1i32, 10i32), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
let mut map: HashMap<_, _> = xs.iter().cloned().collect();
let compute_hash = |map: &HashMap<i32, i32>, k: i32| -> u64 {
use core::hash::{BuildHasher, Hash, Hasher};
let mut hasher = map.hasher().build_hasher();
k.hash(&mut hasher);
hasher.finish()
};
// Existing key (insert)
match map.raw_entry_mut().from_key(&1) {
Vacant(_) => unreachable!(),
Occupied(mut view) => {
assert_eq!(view.get(), &10);
assert_eq!(view.insert(100), 10);
}
}
let hash1 = compute_hash(&map, 1);
assert_eq!(map.raw_entry().from_key(&1).unwrap(), (&1, &100));
assert_eq!(
map.raw_entry().from_hash(hash1, |k| *k == 1).unwrap(),
(&1, &100)
);
assert_eq!(
map.raw_entry().from_key_hashed_nocheck(hash1, &1).unwrap(),
(&1, &100)
);
assert_eq!(map.len(), 6);
// Existing key (update)
match map.raw_entry_mut().from_key(&2) {
Vacant(_) => unreachable!(),
Occupied(mut view) => {
let v = view.get_mut();
let new_v = (*v) * 10;
*v = new_v;
}
}
let hash2 = compute_hash(&map, 2);
assert_eq!(map.raw_entry().from_key(&2).unwrap(), (&2, &200));
assert_eq!(
map.raw_entry().from_hash(hash2, |k| *k == 2).unwrap(),
(&2, &200)
);
assert_eq!(
map.raw_entry().from_key_hashed_nocheck(hash2, &2).unwrap(),
(&2, &200)
);
assert_eq!(map.len(), 6);
// Existing key (take)
let hash3 = compute_hash(&map, 3);
match map.raw_entry_mut().from_key_hashed_nocheck(hash3, &3) {
Vacant(_) => unreachable!(),
Occupied(view) => {
assert_eq!(view.remove_entry(), (3, 30));
}
}
assert_eq!(map.raw_entry().from_key(&3), None);
assert_eq!(map.raw_entry().from_hash(hash3, |k| *k == 3), None);
assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash3, &3), None);
assert_eq!(map.len(), 5);
// Nonexistent key (insert)
match map.raw_entry_mut().from_key(&10) {
Occupied(_) => unreachable!(),
Vacant(view) => {
assert_eq!(view.insert(10, 1000), (&mut 10, &mut 1000));
}
}
assert_eq!(map.raw_entry().from_key(&10).unwrap(), (&10, &1000));
assert_eq!(map.len(), 6);
// Ensure all lookup methods produce equivalent results.
for k in 0..12 {
let hash = compute_hash(&map, k);
let v = map.get(&k).cloned();
let kv = v.as_ref().map(|v| (&k, v));
assert_eq!(map.raw_entry().from_key(&k), kv);
assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv);
assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv);
match map.raw_entry_mut().from_key(&k) {
Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
Vacant(_) => assert_eq!(v, None),
}
match map.raw_entry_mut().from_key_hashed_nocheck(hash, &k) {
Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
Vacant(_) => assert_eq!(v, None),
}
match map.raw_entry_mut().from_hash(hash, |q| *q == k) {
Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
Vacant(_) => assert_eq!(v, None),
}
}
}
// The hasher type is `()`, so all hashes are supplied by the caller via
// the raw-entry API; the key type needs no Hash impl.
#[test]
fn test_key_without_hash_impl() {
#[derive(Debug)]
struct IntWrapper(u64);
let mut m: HashMap<IntWrapper, (), ()> = HashMap::default();
{
assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_none());
}
{
let vacant_entry = match m.raw_entry_mut().from_hash(0, |k| k.0 == 0) {
RawEntryMut::Occupied(..) => panic!("Found entry for key 0"),
RawEntryMut::Vacant(e) => e,
};
vacant_entry.insert_with_hasher(0, IntWrapper(0), (), |k| k.0);
}
{
assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_some());
assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_none());
assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
}
{
let vacant_entry = match m.raw_entry_mut().from_hash(1, |k| k.0 == 1) {
RawEntryMut::Occupied(..) => panic!("Found entry for key 1"),
RawEntryMut::Vacant(e) => e,
};
vacant_entry.insert_with_hasher(1, IntWrapper(1), (), |k| k.0);
}
{
assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_some());
assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_some());
assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
}
{
let occupied_entry = match m.raw_entry_mut().from_hash(0, |k| k.0 == 0) {
RawEntryMut::Occupied(e) => e,
RawEntryMut::Vacant(..) => panic!("Couldn't find entry for key 0"),
};
occupied_entry.remove();
}
assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_none());
assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_some());
assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
}
}
|
use id::{Id, IdIndex, MAXIMUM_CAPACITY};
use flat::{FlatAccess, FlatAccessMut, Flat, FlatGet, FlatGetMut};
// Convenience alias: an IdMap whose dense storage is a plain Vec.
pub type IdMapVec<E, T> = IdMap<E, Vec<T>>;
// Sparse-to-dense map keyed by generational `Id`s:
// - `lookup` is indexed by `Id::index` (sparse) and stores the id's tag plus
//   the element's position in the dense storage;
// - `reverse_lookup` is indexed by dense position and stores the sparse index,
//   so swap-removals can patch `lookup` (see `remove`);
// - `flat` is the dense element storage behind the `Flat` abstraction.
#[derive(Debug)]
pub struct IdMap<E, F: Flat> {
lookup: Vec<Id<E>>,
reverse_lookup: Vec<IdIndex>,
flat: F,
}
impl<E, F: Flat> IdMap<E, F> {
    /// Creates an empty `IdMap`.
    pub fn new() -> Self {
        IdMap {
            lookup: Vec::new(),
            reverse_lookup: Vec::new(),
            flat: F::new(),
        }
    }

    /// Creates an empty `IdMap` pre-allocated for `capacity` elements.
    ///
    /// # Panics
    ///
    /// Panics if `capacity` exceeds `MAXIMUM_CAPACITY`.
    pub fn with_capacity(capacity: usize) -> Self {
        assert!(capacity <= MAXIMUM_CAPACITY);
        IdMap {
            lookup: Vec::with_capacity(capacity),
            reverse_lookup: Vec::with_capacity(capacity),
            flat: F::with_capacity(capacity),
        }
    }

    /// Returns the number of elements currently stored.
    ///
    /// One `reverse_lookup` entry exists per stored element, so its length
    /// is the element count.
    #[inline]
    pub fn len(&self) -> usize {
        self.reverse_lookup.len()
    }

    /// Returns `true` if the map contains no elements.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.reverse_lookup.is_empty()
    }

    /// Inserts `element` under `id`, returning the previous element only if
    /// an element with the *same tag* was already stored at that index.
    ///
    /// If the slot is occupied by a stale generation (same index, different
    /// tag), the element is replaced in place, the slot adopts the new tag,
    /// and `None` is returned.
    #[inline]
    pub fn insert(&mut self, id: Id<E>, mut element: F::Element) -> Option<F::Element> {
        let IdMap { ref mut lookup, ref mut reverse_lookup, ref mut flat } = *self;
        // Dense position the element will occupy if it has to be appended.
        let new_index = {
            let len = reverse_lookup.len();
            assert!(len <= MAXIMUM_CAPACITY);
            len as IdIndex
        };
        let usize_index = id.index as usize;
        let lookup_id = if usize_index < lookup.len() {
            let lookup_id = &mut lookup[usize_index];
            // Try an in-place replace first; `Err` hands the element back
            // when the recorded dense slot is vacant in `flat`.
            element = match flat.replace(lookup_id.index as usize, element) {
                Ok(value) => {
                    return if id.tag == lookup_id.tag {
                        Some(value)
                    } else {
                        // Stale generation: keep the new element, adopt the
                        // new tag, but report no previous value for this id.
                        lookup_id.tag = id.tag;
                        None
                    }
                }
                Err(element) => element,
            };
            lookup_id
        } else {
            // Grow the sparse table so `id.index` becomes addressable.
            lookup.resize(usize_index + 1, Id::invalid());
            &mut lookup[usize_index]
        };
        // Point the sparse slot at the new dense position and append.
        *lookup_id = Id { index: new_index, ..id };
        reverse_lookup.push(id.index);
        flat.push(element);
        None
    }

    /// Removes and returns the element stored under `id`, if present with a
    /// matching tag.
    ///
    /// Removal is by swap-remove: the last dense element moves into the
    /// freed slot and its sparse `lookup` entry is redirected accordingly.
    #[inline]
    pub fn remove(&mut self, id: Id<E>) -> Option<F::Element> {
        let IdMap { ref mut lookup, ref mut reverse_lookup, ref mut flat } = *self;
        let usize_index = id.index as usize;
        let lookup_index = match lookup.get_mut(usize_index) {
            Some(lookup_id) => {
                if lookup_id.tag != id.tag {
                    return None;
                }
                let index = lookup_id.index;
                *lookup_id = Id::invalid();
                index
            }
            None => return None,
        };
        let usize_lookup_index = lookup_index as usize;
        let old_value = match flat.swap_remove(usize_lookup_index) {
            Some(old_value) => old_value,
            None => return None,
        };
        reverse_lookup.swap_remove(usize_lookup_index);
        // If another element was swapped into the freed dense slot, patch
        // its sparse entry to point at the new position.
        reverse_lookup.get(usize_lookup_index)
            .map(|&reverse_index| lookup[reverse_index as usize].index = lookup_index);
        Some(old_value)
    }

    /// Returns a reference to the element stored under `id`, if present with
    /// a matching tag.
    #[inline]
    pub fn get<'a>(&'a self, id: Id<E>) -> Option<<&'a F as FlatGet>::ElementRef>
    where &'a F: FlatGet
    {
        match self.lookup.get(id.index as usize) {
            Some(lookup_id) if lookup_id.tag == id.tag => {
                self.flat.flat_get(lookup_id.index as usize)
            }
            _ => None,
        }
    }

    /// Returns a mutable reference to the element stored under `id`, if
    /// present with a matching tag.
    #[inline]
    pub fn get_mut<'a>(&'a mut self, id: Id<E>) -> Option<<&'a mut F as FlatGetMut>::ElementRefMut>
    where &'a mut F: FlatGetMut
    {
        match self.lookup.get(id.index as usize) {
            Some(lookup_id) if lookup_id.tag == id.tag => {
                self.flat.flat_get_mut(lookup_id.index as usize)
            }
            _ => None,
        }
    }

    /// Maps `id` to its current dense index, if present with a matching tag.
    #[inline]
    pub fn id_to_index(&self, id: Id<E>) -> Option<usize> {
        match self.lookup.get(id.index as usize) {
            Some(lookup_id) if lookup_id.tag == id.tag => Some(lookup_id.index as usize),
            _ => None,
        }
    }

    /// Maps a dense index back to the full `Id` (sparse index + tag) stored
    /// there, if the index is in bounds.
    #[inline]
    pub fn index_to_id(&self, index: usize) -> Option<Id<E>> {
        match self.reverse_lookup.get(index) {
            Some(&lookup_index) => {
                Some(Id { index: lookup_index, ..self.lookup[lookup_index as usize] })
            }
            _ => None,
        }
    }

    /// Borrows the dense storage's accessor (see `FlatAccess`).
    #[inline]
    pub fn access<'a>(&'a self) -> <&'a F as FlatAccess>::Access
    where &'a F: FlatAccess
    {
        self.flat.flat_access()
    }

    /// Mutably borrows the dense storage's accessor (see `FlatAccessMut`).
    #[inline]
    pub fn access_mut<'a>(&'a mut self) -> <&'a mut F as FlatAccessMut>::AccessMut
    where &'a mut F: FlatAccessMut
    {
        self.flat.flat_access_mut()
    }
}
#[cfg(test)]
mod tests {
use super::*;
use slab::IdSlab;
// Fresh slab (to mint generational ids) and an empty map under test.
fn setup() -> (IdSlab<()>, IdMapVec<(), u32>) {
(IdSlab::new(), IdMapVec::new())
}
// An id slot reused by the slab gets a new tag; inserting with the stale
// id must not report a previous value.
#[test]
fn insert_twice_returns_first_and_respects_tag() {
let (mut slab, mut map) = setup();
let id1 = slab.insert(());
assert_eq!(map.insert(id1, 1), None);
assert_eq!(map.insert(id1, 2), Some(1));
slab.remove(id1);
let id2 = slab.insert(());
assert_eq!(map.insert(id2, 3), None);
assert_eq!(map.insert(id2, 4), Some(3));
// id1 is stale (same index, old tag): value replaced, None returned.
assert_eq!(map.insert(id1, 4), None);
}
#[test]
fn insert_get_returns_correct() {
let (mut slab, mut map) = setup();
let id1 = slab.insert(());
let id2 = slab.insert(());
assert_eq!(map.get(id1), None);
assert_eq!(map.get(id2), None);
assert_eq!(map.insert(id1, 1), None);
assert_eq!(map.insert(id2, 2), None);
assert_eq!(map.get(id1), Some(&1));
assert_eq!(map.get(id2), Some(&2));
assert_eq!(map.get_mut(id1).map(|x| *x), Some(1));
assert_eq!(map.get_mut(id2).map(|x| *x), Some(2));
slab.remove(id1);
slab.remove(id2);
let id3 = slab.insert(());
let id4 = slab.insert(());
assert_eq!(map.get(id3), None);
assert_eq!(map.get(id4), None);
assert_eq!(map.insert(id3, 3), None);
assert_eq!(map.insert(id4, 4), None);
// Stale ids must no longer resolve after their slots were reused.
assert_eq!(map.get(id1), None);
assert_eq!(map.get(id2), None);
assert_eq!(map.get(id3), Some(&3));
assert_eq!(map.get(id4), Some(&4));
assert_eq!(map.get_mut(id3).map(|x| *x), Some(3));
assert_eq!(map.get_mut(id4).map(|x| *x), Some(4));
}
#[test]
fn modify_through_get_mut() {
let (mut slab, mut map) = setup();
let id1 = slab.insert(());
assert_eq!(map.get(id1), None);
assert_eq!(map.insert(id1, 1), None);
*map.get_mut(id1).unwrap() = 10;
assert_eq!(map.get(id1), Some(&10));
assert_eq!(map.insert(id1, 20), Some(10));
}
// Also exercises the swap-remove patch-up path with two live elements.
#[test]
fn insert_remove_get() {
let (mut slab, mut map) = setup();
let id1 = slab.insert(());
let id2 = slab.insert(());
assert_eq!(map.insert(id1, 1), None);
assert_eq!(map.remove(id1), Some(1));
assert_eq!(map.remove(id1), None);
assert_eq!(map.get(id1), None);
assert_eq!(map.insert(id2, 20), None);
assert_eq!(map.insert(id1, 10), None);
assert_eq!(map.remove(id1), Some(10));
assert_eq!(map.get(id2), Some(&20));
assert_eq!(map.remove(id2), Some(20));
assert_eq!(map.get(id2), None);
}
}
add IdMap::len
use id::{Id, IdIndex, MAXIMUM_CAPACITY};
use flat::{FlatAccess, FlatAccessMut, Flat, FlatGet, FlatGetMut};
// Convenience alias: an IdMap whose dense storage is a plain Vec.
pub type IdMapVec<E, T> = IdMap<E, Vec<T>>;
// Sparse-to-dense map keyed by generational `Id`s:
// - `lookup` is indexed by `Id::index` (sparse) and stores the id's tag plus
//   the element's position in the dense storage;
// - `reverse_lookup` is indexed by dense position and stores the sparse index,
//   so swap-removals can patch `lookup` (see `remove`);
// - `flat` is the dense element storage behind the `Flat` abstraction.
#[derive(Debug)]
pub struct IdMap<E, F: Flat> {
lookup: Vec<Id<E>>,
reverse_lookup: Vec<IdIndex>,
flat: F,
}
impl<E, F: Flat> IdMap<E, F> {
    /// Creates an empty `IdMap`.
    pub fn new() -> Self {
        IdMap {
            lookup: Vec::new(),
            reverse_lookup: Vec::new(),
            flat: F::new(),
        }
    }

    /// Returns the number of elements currently stored.
    ///
    /// One `reverse_lookup` entry exists per stored element, so its length
    /// is the element count.
    pub fn len(&self) -> usize {
        self.reverse_lookup.len()
    }

    /// Returns `true` if the map contains no elements.
    ///
    /// Added to pair with `len` (clippy: `len_without_is_empty`).
    pub fn is_empty(&self) -> bool {
        self.reverse_lookup.is_empty()
    }

    /// Creates an empty `IdMap` pre-allocated for `capacity` elements.
    ///
    /// # Panics
    ///
    /// Panics if `capacity` exceeds `MAXIMUM_CAPACITY`.
    pub fn with_capacity(capacity: usize) -> Self {
        assert!(capacity <= MAXIMUM_CAPACITY);
        IdMap {
            lookup: Vec::with_capacity(capacity),
            reverse_lookup: Vec::with_capacity(capacity),
            flat: F::with_capacity(capacity),
        }
    }

    /// Inserts `element` under `id`, returning the previous element only if
    /// an element with the *same tag* was already stored at that index.
    ///
    /// If the slot is occupied by a stale generation (same index, different
    /// tag), the element is replaced in place, the slot adopts the new tag,
    /// and `None` is returned.
    #[inline]
    pub fn insert(&mut self, id: Id<E>, mut element: F::Element) -> Option<F::Element> {
        let IdMap { ref mut lookup, ref mut reverse_lookup, ref mut flat } = *self;
        // Dense position the element will occupy if it has to be appended.
        let new_index = {
            let len = reverse_lookup.len();
            assert!(len <= MAXIMUM_CAPACITY);
            len as IdIndex
        };
        let usize_index = id.index as usize;
        let lookup_id = if usize_index < lookup.len() {
            let lookup_id = &mut lookup[usize_index];
            // Try an in-place replace first; `Err` hands the element back
            // when the recorded dense slot is vacant in `flat`.
            element = match flat.replace(lookup_id.index as usize, element) {
                Ok(value) => {
                    return if id.tag == lookup_id.tag {
                        Some(value)
                    } else {
                        // Stale generation: keep the new element, adopt the
                        // new tag, but report no previous value for this id.
                        lookup_id.tag = id.tag;
                        None
                    }
                }
                Err(element) => element,
            };
            lookup_id
        } else {
            // Grow the sparse table so `id.index` becomes addressable.
            lookup.resize(usize_index + 1, Id::invalid());
            &mut lookup[usize_index]
        };
        // Point the sparse slot at the new dense position and append.
        *lookup_id = Id { index: new_index, ..id };
        reverse_lookup.push(id.index);
        flat.push(element);
        None
    }

    /// Removes and returns the element stored under `id`, if present with a
    /// matching tag.
    ///
    /// Removal is by swap-remove: the last dense element moves into the
    /// freed slot and its sparse `lookup` entry is redirected accordingly.
    #[inline]
    pub fn remove(&mut self, id: Id<E>) -> Option<F::Element> {
        let IdMap { ref mut lookup, ref mut reverse_lookup, ref mut flat } = *self;
        let usize_index = id.index as usize;
        let lookup_index = match lookup.get_mut(usize_index) {
            Some(lookup_id) => {
                if lookup_id.tag != id.tag {
                    return None;
                }
                let index = lookup_id.index;
                *lookup_id = Id::invalid();
                index
            }
            None => return None,
        };
        let usize_lookup_index = lookup_index as usize;
        let old_value = match flat.swap_remove(usize_lookup_index) {
            Some(old_value) => old_value,
            None => return None,
        };
        reverse_lookup.swap_remove(usize_lookup_index);
        // If another element was swapped into the freed dense slot, patch
        // its sparse entry to point at the new position.
        reverse_lookup.get(usize_lookup_index)
            .map(|&reverse_index| lookup[reverse_index as usize].index = lookup_index);
        Some(old_value)
    }

    /// Returns a reference to the element stored under `id`, if present with
    /// a matching tag.
    #[inline]
    pub fn get<'a>(&'a self, id: Id<E>) -> Option<<&'a F as FlatGet>::ElementRef>
    where &'a F: FlatGet
    {
        match self.lookup.get(id.index as usize) {
            Some(lookup_id) if lookup_id.tag == id.tag => {
                self.flat.flat_get(lookup_id.index as usize)
            }
            _ => None,
        }
    }

    /// Returns a mutable reference to the element stored under `id`, if
    /// present with a matching tag.
    #[inline]
    pub fn get_mut<'a>(&'a mut self, id: Id<E>) -> Option<<&'a mut F as FlatGetMut>::ElementRefMut>
    where &'a mut F: FlatGetMut
    {
        match self.lookup.get(id.index as usize) {
            Some(lookup_id) if lookup_id.tag == id.tag => {
                self.flat.flat_get_mut(lookup_id.index as usize)
            }
            _ => None,
        }
    }

    /// Maps `id` to its current dense index, if present with a matching tag.
    #[inline]
    pub fn id_to_index(&self, id: Id<E>) -> Option<usize> {
        match self.lookup.get(id.index as usize) {
            Some(lookup_id) if lookup_id.tag == id.tag => Some(lookup_id.index as usize),
            _ => None,
        }
    }

    /// Maps a dense index back to the full `Id` (sparse index + tag) stored
    /// there, if the index is in bounds.
    #[inline]
    pub fn index_to_id(&self, index: usize) -> Option<Id<E>> {
        match self.reverse_lookup.get(index) {
            Some(&lookup_index) => {
                Some(Id { index: lookup_index, ..self.lookup[lookup_index as usize] })
            }
            _ => None,
        }
    }

    /// Borrows the dense storage's accessor (see `FlatAccess`).
    #[inline]
    pub fn access<'a>(&'a self) -> <&'a F as FlatAccess>::Access
    where &'a F: FlatAccess
    {
        self.flat.flat_access()
    }

    /// Mutably borrows the dense storage's accessor (see `FlatAccessMut`).
    #[inline]
    pub fn access_mut<'a>(&'a mut self) -> <&'a mut F as FlatAccessMut>::AccessMut
    where &'a mut F: FlatAccessMut
    {
        self.flat.flat_access_mut()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use slab::IdSlab;

    // Fresh slab (for allocating ids) plus an empty Vec-backed map.
    fn setup() -> (IdSlab<()>, IdMapVec<(), u32>) {
        (IdSlab::new(), IdMapVec::new())
    }

    #[test]
    fn insert_twice_returns_first_and_respects_tag() {
        let (mut slab, mut map) = setup();
        let id1 = slab.insert(());
        assert_eq!(map.insert(id1, 1), None);
        assert_eq!(map.insert(id1, 2), Some(1));
        // Recycle the slot: id2 reuses id1's index with a new tag.
        slab.remove(id1);
        let id2 = slab.insert(());
        assert_eq!(map.insert(id2, 3), None);
        assert_eq!(map.insert(id2, 4), Some(3));
        // Inserting with the stale id counts as a fresh insert (returns None).
        assert_eq!(map.insert(id1, 4), None);
    }

    #[test]
    fn insert_get_returns_correct() {
        let (mut slab, mut map) = setup();
        let id1 = slab.insert(());
        let id2 = slab.insert(());
        assert_eq!(map.get(id1), None);
        assert_eq!(map.get(id2), None);
        assert_eq!(map.insert(id1, 1), None);
        assert_eq!(map.insert(id2, 2), None);
        assert_eq!(map.get(id1), Some(&1));
        assert_eq!(map.get(id2), Some(&2));
        assert_eq!(map.get_mut(id1).map(|x| *x), Some(1));
        assert_eq!(map.get_mut(id2).map(|x| *x), Some(2));
        // Recycle both slots; stale ids must stop resolving.
        slab.remove(id1);
        slab.remove(id2);
        let id3 = slab.insert(());
        let id4 = slab.insert(());
        assert_eq!(map.get(id3), None);
        assert_eq!(map.get(id4), None);
        assert_eq!(map.insert(id3, 3), None);
        assert_eq!(map.insert(id4, 4), None);
        assert_eq!(map.get(id1), None);
        assert_eq!(map.get(id2), None);
        assert_eq!(map.get(id3), Some(&3));
        assert_eq!(map.get(id4), Some(&4));
        assert_eq!(map.get_mut(id3).map(|x| *x), Some(3));
        assert_eq!(map.get_mut(id4).map(|x| *x), Some(4));
    }

    #[test]
    fn modify_through_get_mut() {
        let (mut slab, mut map) = setup();
        let id1 = slab.insert(());
        assert_eq!(map.get(id1), None);
        assert_eq!(map.insert(id1, 1), None);
        // Mutation through the returned reference must be visible to `get`.
        *map.get_mut(id1).unwrap() = 10;
        assert_eq!(map.get(id1), Some(&10));
        assert_eq!(map.insert(id1, 20), Some(10));
    }

    #[test]
    fn insert_remove_get() {
        let (mut slab, mut map) = setup();
        let id1 = slab.insert(());
        let id2 = slab.insert(());
        assert_eq!(map.insert(id1, 1), None);
        assert_eq!(map.remove(id1), Some(1));
        // Double remove and get after remove both yield None.
        assert_eq!(map.remove(id1), None);
        assert_eq!(map.get(id1), None);
        // Removing id1 triggers the swap_remove fix-up path; id2 must survive.
        assert_eq!(map.insert(id2, 20), None);
        assert_eq!(map.insert(id1, 10), None);
        assert_eq!(map.remove(id1), Some(10));
        assert_eq!(map.get(id2), Some(&20));
        assert_eq!(map.remove(id2), Some(20));
        assert_eq!(map.get(id2), None);
    }
}
|
//! Useful parser combinators
//!
//! A number of useful parser combinators have already been implemented.
//! Some of them use macros, other are implemented through functions.
//! Hopefully, the syntax will converge to only one way in the future,
//! but the macros system makes no promises.
//!
#[cfg(feature = "alloc")]
use lib::std::boxed::Box;
#[cfg(feature = "std")]
use lib::std::fmt::Debug;
use internal::*;
use traits::{AsChar, InputIter, InputLength, InputTakeAtPosition};
use traits::{need_more, need_more_err, AtEof};
use lib::std::ops::{Range, RangeFrom, RangeTo};
use traits::{Compare, CompareResult, Offset, Slice};
use util::ErrorKind;
use lib::std::mem::transmute;
#[cfg(feature = "alloc")]
#[inline]
/// Builds a boxed closure that recognizes the byte sequence `rec` at the
/// start of its input, returning `(remaining, matched)` on success and an
/// `ErrorKind::TagClosure` error otherwise.
pub fn tag_cl<'a, 'b>(rec: &'a [u8]) -> Box<Fn(&'b [u8]) -> IResult<&'b [u8], &'b [u8]> + 'a> {
    Box::new(move |input: &'b [u8]| -> IResult<&'b [u8], &'b [u8]> {
        let tag_len = rec.len();
        if input.len() < tag_len || &input[..tag_len] != rec {
            let e: ErrorKind<u32> = ErrorKind::TagClosure;
            return Err(Err::Error(error_position!(input, e)));
        }
        let (matched, remaining) = input.split_at(tag_len);
        Ok((remaining, matched))
    })
}
#[cfg(feature = "std")]
#[inline]
/// Debug helper: prints the input with `{:?}` to stdout and consumes
/// nothing — the whole input is returned as the remaining part.
pub fn print<T: Debug>(input: T) -> IResult<T, ()> {
    println!("{:?}", input);
    Ok((input, ()))
}
#[inline]
/// NOTE: unusual result shape — the *remaining* input is `()` and the whole
/// original slice is produced as the output value.
pub fn begin(input: &[u8]) -> IResult<(), &[u8]> {
    Ok(((), input))
}
/// Recognizes the literal sequence `"\r\n"` at the start of the input,
/// returning `(remaining, matched)` on success.
pub fn crlf<T>(input: T) -> IResult<T, T>
where
    T: Slice<Range<usize>> + Slice<RangeFrom<usize>> + Slice<RangeTo<usize>>,
    T: InputIter + AtEof,
    T: Compare<&'static str>,
{
    let comparison = input.compare("\r\n");
    match comparison {
        CompareResult::Error => {
            let e: ErrorKind<u32> = ErrorKind::CrLf;
            Err(Err::Error(error_position!(input, e)))
        }
        CompareResult::Incomplete => need_more_err(input, Needed::Size(2), ErrorKind::CrLf),
        //FIXME: is this the right index?
        CompareResult::Ok => {
            let matched = input.slice(0..2);
            let remaining = input.slice(2..);
            Ok((remaining, matched))
        }
    }
}
// FIXME: when rust-lang/rust#17436 is fixed, macros will be able to export
// public methods
/// Consumes everything up to (but not including) the first `'\r'` or `'\n'`.
///
/// A lone `'\r'` not followed by `'\n'` is reported as an `ErrorKind::Tag`
/// error; if no line ending is found, the whole input is returned only at
/// eof, otherwise `Incomplete` is signalled (streaming semantics).
pub fn not_line_ending<T>(input: T) -> IResult<T, T>
where
    T: Slice<Range<usize>> + Slice<RangeFrom<usize>> + Slice<RangeTo<usize>>,
    T: InputIter + InputLength + AtEof,
    T: Compare<&'static str>,
    <T as InputIter>::Item: AsChar,
    <T as InputIter>::RawItem: AsChar,
{
    match input.position(|item| {
        let c = item.as_char();
        c == '\r' || c == '\n'
    }) {
        None => {
            // No terminator in sight: complete input only if we know the
            // stream has ended, otherwise ask for more data.
            if input.at_eof() {
                Ok((input.slice(input.input_len()..), input))
            } else {
                Err(Err::Incomplete(Needed::Unknown))
            }
        }
        Some(index) => {
            let mut it = input.slice(index..).iter_elements();
            let nth = it.next().unwrap().as_char();
            if nth == '\r' {
                // A '\r' must be the start of "\r\n" to count as a line
                // ending; a bare '\r' is an error.
                let sliced = input.slice(index..);
                let comp = sliced.compare("\r\n");
                match comp {
                    //FIXME: calculate the right index
                    CompareResult::Incomplete => need_more_err(input, Needed::Unknown, ErrorKind::Tag),
                    CompareResult::Error => {
                        let e: ErrorKind<u32> = ErrorKind::Tag;
                        Err(Err::Error(error_position!(input, e)))
                    }
                    CompareResult::Ok => Ok((input.slice(index..), input.slice(..index))),
                }
            } else {
                Ok((input.slice(index..), input.slice(..index)))
            }
        }
    }
}
/// Recognizes an end of line (both '\n' and '\r\n')
pub fn line_ending<T>(input: T) -> IResult<T, T>
where
    T: Slice<Range<usize>> + Slice<RangeFrom<usize>> + Slice<RangeTo<usize>>,
    T: InputIter + InputLength + AtEof,
    T: Compare<&'static str>,
{
    // Try the single-byte "\n" first; only on a definite mismatch fall back
    // to the two-byte "\r\n".
    match input.compare("\n") {
        CompareResult::Ok => Ok((input.slice(1..), input.slice(0..1))),
        CompareResult::Incomplete => need_more_err(input, Needed::Size(1), ErrorKind::CrLf::<u32>),
        CompareResult::Error => {
            match input.compare("\r\n") {
                //FIXME: is this the right index?
                CompareResult::Ok => Ok((input.slice(2..), input.slice(0..2))),
                CompareResult::Incomplete => need_more_err(input, Needed::Size(2), ErrorKind::CrLf::<u32>),
                CompareResult::Error => Err(Err::Error(error_position!(input, ErrorKind::CrLf::<u32>))),
            }
        }
    }
}
/// Recognizes an end of line — a thin alias that delegates to `line_ending`.
pub fn eol<T>(input: T) -> IResult<T, T>
where
    T: Slice<Range<usize>> + Slice<RangeFrom<usize>> + Slice<RangeTo<usize>>,
    T: InputIter + InputLength + AtEof,
    T: Compare<&'static str>,
{
    line_ending(input)
}
/// Tests if byte is ASCII alphabetic: A-Z, a-z
#[inline]
pub fn is_alphabetic(chr: u8) -> bool {
    (b'A' <= chr && chr <= b'Z') || (b'a' <= chr && chr <= b'z')
}
/// Tests if byte is ASCII digit: 0-9
#[inline]
pub fn is_digit(chr: u8) -> bool {
    b'0' <= chr && chr <= b'9'
}
/// Tests if byte is ASCII hex digit: 0-9, A-F, a-f
#[inline]
pub fn is_hex_digit(chr: u8) -> bool {
    match chr {
        b'0'..=b'9' | b'A'..=b'F' | b'a'..=b'f' => true,
        _ => false,
    }
}
/// Tests if byte is ASCII octal digit: 0-7
#[inline]
pub fn is_oct_digit(chr: u8) -> bool {
    match chr {
        b'0'..=b'7' => true,
        _ => false,
    }
}
/// Tests if byte is ASCII alphanumeric: A-Z, a-z, 0-9
#[inline]
pub fn is_alphanumeric(chr: u8) -> bool {
    // Both predicates are pure, so the evaluation order is irrelevant.
    is_digit(chr) || is_alphabetic(chr)
}
/// Tests if byte is ASCII space or tab
#[inline]
pub fn is_space(chr: u8) -> bool {
    match chr {
        b' ' | b'\t' => true,
        _ => false,
    }
}
// FIXME: when rust-lang/rust#17436 is fixed, macros will be able to export
//pub filter!(alpha is_alphabetic)
//pub filter!(digit is_digit)
//pub filter!(hex_digit is_hex_digit)
//pub filter!(oct_digit is_oct_digit)
//pub filter!(alphanumeric is_alphanumeric)
/// Recognizes one or more lowercase and uppercase alphabetic characters: a-zA-Z
///
/// Delegates to `alpha1`; kept for backward compatibility.
pub fn alpha<T>(input: T) -> IResult<T, T, u32>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar,
{
    alpha1(input)
}
/// Recognizes zero or more lowercase and uppercase alphabetic characters: a-zA-Z
///
/// Splits at the first non-alphabetic item; matching zero characters succeeds.
pub fn alpha0<T>(input: T) -> IResult<T, T, u32>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar,
{
    input.split_at_position(|item| !item.is_alpha())
}
/// Recognizes one or more lowercase and uppercase alphabetic characters: a-zA-Z
///
/// Fails with `ErrorKind::Alpha` when the first item is not alphabetic.
pub fn alpha1<T>(input: T) -> IResult<T, T, u32>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar,
{
    input.split_at_position1(|item| !item.is_alpha(), ErrorKind::Alpha)
}
/// Recognizes one or more numerical characters: 0-9
///
/// Delegates to `digit1`; kept for backward compatibility.
pub fn digit<T>(input: T) -> IResult<T, T>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar,
{
    digit1(input)
}
/// Recognizes zero or more numerical characters: 0-9
///
/// Splits at the first non-digit item; matching zero characters succeeds.
pub fn digit0<T>(input: T) -> IResult<T, T>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar,
{
    input.split_at_position(|item| !item.is_dec_digit())
}
/// Recognizes one or more numerical characters: 0-9
///
/// Fails with `ErrorKind::Digit` when the first item is not a digit.
pub fn digit1<T>(input: T) -> IResult<T, T>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar,
{
    input.split_at_position1(|item| !item.is_dec_digit(), ErrorKind::Digit)
}
/// Recognizes one or more hexadecimal numerical characters: 0-9, A-F, a-f
///
/// Delegates to `hex_digit1`; kept for backward compatibility.
pub fn hex_digit<T>(input: T) -> IResult<T, T>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar,
{
    hex_digit1(input)
}
/// Recognizes zero or more hexadecimal numerical characters: 0-9, A-F, a-f
///
/// Splits at the first non-hex-digit item; matching zero characters succeeds.
pub fn hex_digit0<T>(input: T) -> IResult<T, T>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar,
{
    input.split_at_position(|item| !item.is_hex_digit())
}
/// Recognizes one or more hexadecimal numerical characters: 0-9, A-F, a-f
///
/// Fails with `ErrorKind::HexDigit` when the first item is not a hex digit.
pub fn hex_digit1<T>(input: T) -> IResult<T, T>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar,
{
    input.split_at_position1(|item| !item.is_hex_digit(), ErrorKind::HexDigit)
}
/// Recognizes one or more octal characters: 0-7
///
/// Delegates to `oct_digit1`; kept for backward compatibility.
pub fn oct_digit<T>(input: T) -> IResult<T, T>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar,
{
    oct_digit1(input)
}
/// Recognizes zero or more octal characters: 0-7
///
/// Splits at the first non-octal-digit item; matching zero characters succeeds.
pub fn oct_digit0<T>(input: T) -> IResult<T, T>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar,
{
    input.split_at_position(|item| !item.is_oct_digit())
}
/// Recognizes one or more octal characters: 0-7
///
/// Fails with `ErrorKind::OctDigit` when the first item is not an octal digit.
pub fn oct_digit1<T>(input: T) -> IResult<T, T>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar,
{
    input.split_at_position1(|item| !item.is_oct_digit(), ErrorKind::OctDigit)
}
/// Recognizes one or more numerical and alphabetic characters: 0-9a-zA-Z
///
/// Delegates to `alphanumeric1`; kept for backward compatibility.
pub fn alphanumeric<T>(input: T) -> IResult<T, T>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar,
{
    alphanumeric1(input)
}
/// Recognizes zero or more numerical and alphabetic characters: 0-9a-zA-Z
///
/// Splits at the first non-alphanumeric item; matching zero characters succeeds.
pub fn alphanumeric0<T>(input: T) -> IResult<T, T>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar,
{
    input.split_at_position(|item| !item.is_alphanum())
}
/// Recognizes one or more numerical and alphabetic characters: 0-9a-zA-Z
///
/// Fails with `ErrorKind::AlphaNumeric` when the first item does not match.
pub fn alphanumeric1<T>(input: T) -> IResult<T, T>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar,
{
    input.split_at_position1(|item| !item.is_alphanum(), ErrorKind::AlphaNumeric)
}
/// Recognizes one or more spaces and tabs
///
/// Delegates to `space1`; kept for backward compatibility.
pub fn space<T>(input: T) -> IResult<T, T>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar + Clone,
{
    space1(input)
}
/// Recognizes zero or more spaces and tabs
///
/// Splits at the first item that is neither `' '` nor `'\t'`.
pub fn space0<T>(input: T) -> IResult<T, T>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar + Clone,
{
    input.split_at_position(|item| {
        // `as_char` consumes the item, hence the clone.
        let c = item.clone().as_char();
        !(c == ' ' || c == '\t')
    })
}
/// Recognizes one or more spaces and tabs
///
/// Fails with `ErrorKind::Space` when the first item is not a space or tab.
pub fn space1<T>(input: T) -> IResult<T, T>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar + Clone,
{
    input.split_at_position1(
        |item| {
            let c = item.clone().as_char();
            !(c == ' ' || c == '\t')
        },
        ErrorKind::Space,
    )
}
/// Recognizes one or more spaces, tabs, carriage returns and line feeds
///
/// Delegates to `multispace1`; kept for backward compatibility.
pub fn multispace<T>(input: T) -> IResult<T, T>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar + Clone,
{
    multispace1(input)
}
/// Recognizes zero or more spaces, tabs, carriage returns and line feeds
///
/// Splits at the first item outside `{' ', '\t', '\r', '\n'}`.
pub fn multispace0<T>(input: T) -> IResult<T, T>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar + Clone,
{
    input.split_at_position(|item| {
        // `as_char` consumes the item, hence the clone.
        let c = item.clone().as_char();
        !(c == ' ' || c == '\t' || c == '\r' || c == '\n')
    })
}
/// Recognizes one or more spaces, tabs, carriage returns and line feeds
///
/// Fails with `ErrorKind::MultiSpace` when the first item is not whitespace.
pub fn multispace1<T>(input: T) -> IResult<T, T>
where
    T: InputTakeAtPosition,
    <T as InputTakeAtPosition>::Item: AsChar + Clone,
{
    input.split_at_position1(
        |item| {
            let c = item.clone().as_char();
            !(c == ' ' || c == '\t' || c == '\r' || c == '\n')
        },
        ErrorKind::MultiSpace,
    )
}
/// Parses a length-prefixed buffer: the first byte is the payload length,
/// followed by that many payload bytes. Returns `(remaining, payload)`.
pub fn sized_buffer(input: &[u8]) -> IResult<&[u8], &[u8]> {
    match input.split_first() {
        None => need_more(input, Needed::Unknown),
        Some((&len_byte, _)) => {
            let len = len_byte as usize;
            if input.len() < len + 1 {
                need_more(input, Needed::Size(1 + len))
            } else {
                Ok((&input[len + 1..], &input[1..len + 1]))
            }
        }
    }
}
/// Recognizes an unsigned 1 byte integer (equivalent to take!(1)
#[inline]
pub fn be_u8(i: &[u8]) -> IResult<&[u8], u8> {
    match i.first() {
        None => need_more(i, Needed::Size(1)),
        Some(&byte) => Ok((&i[1..], byte)),
    }
}
/// Recognizes big endian unsigned 2 bytes integer
#[inline]
pub fn be_u16(i: &[u8]) -> IResult<&[u8], u16> {
    if i.len() < 2 {
        return need_more(i, Needed::Size(2));
    }
    // Fold most-significant byte first; `|` equals `+` here because the
    // shifted byte ranges never overlap.
    let value = i[..2].iter().fold(0u16, |acc, &byte| (acc << 8) | u16::from(byte));
    Ok((&i[2..], value))
}
/// Recognizes big endian unsigned 3 byte integer
#[inline]
pub fn be_u24(i: &[u8]) -> IResult<&[u8], u32> {
    if i.len() < 3 {
        return need_more(i, Needed::Size(3));
    }
    let value = i[..3].iter().fold(0u32, |acc, &byte| (acc << 8) | u32::from(byte));
    Ok((&i[3..], value))
}
/// Recognizes big endian unsigned 4 bytes integer
#[inline]
pub fn be_u32(i: &[u8]) -> IResult<&[u8], u32> {
    if i.len() < 4 {
        return need_more(i, Needed::Size(4));
    }
    let value = i[..4].iter().fold(0u32, |acc, &byte| (acc << 8) | u32::from(byte));
    Ok((&i[4..], value))
}
/// Recognizes big endian unsigned 8 bytes integer
#[inline]
pub fn be_u64(i: &[u8]) -> IResult<&[u8], u64, u32> {
    if i.len() < 8 {
        return need_more(i, Needed::Size(8));
    }
    let value = i[..8].iter().fold(0u64, |acc, &byte| (acc << 8) | u64::from(byte));
    Ok((&i[8..], value))
}
/// Recognizes a signed 1 byte integer (equivalent to take!(1)
#[inline]
pub fn be_i8(i: &[u8]) -> IResult<&[u8], i8> {
map!(i, be_u8, |x| x as i8)
}
/// Recognizes big endian signed 2 bytes integer
#[inline]
pub fn be_i16(i: &[u8]) -> IResult<&[u8], i16> {
map!(i, be_u16, |x| x as i16)
}
/// Recognizes big endian signed 3 bytes integer
#[inline]
pub fn be_i24(i: &[u8]) -> IResult<&[u8], i32> {
// Same as the unsigned version but we need to sign-extend manually here
map!(i, be_u24, |x| if x & 0x80_00_00 != 0 {
(x | 0xff_00_00_00) as i32
} else {
x as i32
})
}
/// Recognizes big endian signed 4 bytes integer
#[inline]
pub fn be_i32(i: &[u8]) -> IResult<&[u8], i32> {
map!(i, be_u32, |x| x as i32)
}
/// Recognizes big endian signed 8 bytes integer
#[inline]
pub fn be_i64(i: &[u8]) -> IResult<&[u8], i64> {
map!(i, be_u64, |x| x as i64)
}
/// Recognizes an unsigned 1 byte integer (equivalent to take!(1)
#[inline]
pub fn le_u8(i: &[u8]) -> IResult<&[u8], u8> {
    match i.first() {
        None => need_more(i, Needed::Size(1)),
        Some(&byte) => Ok((&i[1..], byte)),
    }
}
/// Recognizes little endian unsigned 2 bytes integer
#[inline]
pub fn le_u16(i: &[u8]) -> IResult<&[u8], u16> {
    if i.len() < 2 {
        return need_more(i, Needed::Size(2));
    }
    // Little endian: fold the bytes in reverse so the last byte ends up
    // most significant.
    let value = i[..2].iter().rev().fold(0u16, |acc, &byte| (acc << 8) | u16::from(byte));
    Ok((&i[2..], value))
}
/// Recognizes little endian unsigned 3 byte integer
#[inline]
pub fn le_u24(i: &[u8]) -> IResult<&[u8], u32> {
    if i.len() < 3 {
        return need_more(i, Needed::Size(3));
    }
    let value = i[..3].iter().rev().fold(0u32, |acc, &byte| (acc << 8) | u32::from(byte));
    Ok((&i[3..], value))
}
/// Recognizes little endian unsigned 4 bytes integer
#[inline]
pub fn le_u32(i: &[u8]) -> IResult<&[u8], u32> {
    if i.len() < 4 {
        return need_more(i, Needed::Size(4));
    }
    let value = i[..4].iter().rev().fold(0u32, |acc, &byte| (acc << 8) | u32::from(byte));
    Ok((&i[4..], value))
}
/// Recognizes little endian unsigned 8 bytes integer
#[inline]
pub fn le_u64(i: &[u8]) -> IResult<&[u8], u64> {
    if i.len() < 8 {
        return need_more(i, Needed::Size(8));
    }
    let value = i[..8].iter().rev().fold(0u64, |acc, &byte| (acc << 8) | u64::from(byte));
    Ok((&i[8..], value))
}
/// Recognizes a signed 1 byte integer (equivalent to take!(1)
#[inline]
pub fn le_i8(i: &[u8]) -> IResult<&[u8], i8> {
map!(i, le_u8, |x| x as i8)
}
/// Recognizes little endian signed 2 bytes integer
#[inline]
pub fn le_i16(i: &[u8]) -> IResult<&[u8], i16> {
map!(i, le_u16, |x| x as i16)
}
/// Recognizes little endian signed 3 bytes integer
#[inline]
pub fn le_i24(i: &[u8]) -> IResult<&[u8], i32> {
// Same as the unsigned version but we need to sign-extend manually here
map!(i, le_u24, |x| if x & 0x80_00_00 != 0 {
(x | 0xff_00_00_00) as i32
} else {
x as i32
})
}
/// Recognizes little endian signed 4 bytes integer
#[inline]
pub fn le_i32(i: &[u8]) -> IResult<&[u8], i32> {
map!(i, le_u32, |x| x as i32)
}
/// Recognizes little endian signed 8 bytes integer
#[inline]
pub fn le_i64(i: &[u8]) -> IResult<&[u8], i64> {
map!(i, le_u64, |x| x as i64)
}
/// Configurable endianness
///
/// Consumed by the `u16!`/`u32!`/`u64!`/`i16!`/`i32!`/`i64!` macros below to
/// select between the `be_*` and `le_*` parsers at runtime.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum Endianness {
    Big,
    Little,
}
// NOTE: these macros expand at the call site, so `Endianness` and the
// matching `be_*`/`le_*` parsers must be in scope there.
/// if the parameter is nom::Endianness::Big, parse a big endian u16 integer,
/// otherwise a little endian u16 integer
#[macro_export]
macro_rules! u16 ( ($i:expr, $e:expr) => ( {if Endianness::Big == $e { be_u16($i) } else { le_u16($i) } } ););
/// if the parameter is nom::Endianness::Big, parse a big endian u32 integer,
/// otherwise a little endian u32 integer
#[macro_export]
macro_rules! u32 ( ($i:expr, $e:expr) => ( {if Endianness::Big == $e { be_u32($i) } else { le_u32($i) } } ););
/// if the parameter is nom::Endianness::Big, parse a big endian u64 integer,
/// otherwise a little endian u64 integer
#[macro_export]
macro_rules! u64 ( ($i:expr, $e:expr) => ( {if Endianness::Big == $e { be_u64($i) } else { le_u64($i) } } ););
/// if the parameter is nom::Endianness::Big, parse a big endian i16 integer,
/// otherwise a little endian i16 integer
#[macro_export]
macro_rules! i16 ( ($i:expr, $e:expr) => ( {if Endianness::Big == $e { be_i16($i) } else { le_i16($i) } } ););
/// if the parameter is nom::Endianness::Big, parse a big endian i32 integer,
/// otherwise a little endian i32 integer
#[macro_export]
macro_rules! i32 ( ($i:expr, $e:expr) => ( {if Endianness::Big == $e { be_i32($i) } else { le_i32($i) } } ););
/// if the parameter is nom::Endianness::Big, parse a big endian i64 integer,
/// otherwise a little endian i64 integer
#[macro_export]
macro_rules! i64 ( ($i:expr, $e:expr) => ( {if Endianness::Big == $e { be_i64($i) } else { le_i64($i) } } ););
/// Recognizes big endian 4 bytes floating point number
#[inline]
pub fn be_f32(input: &[u8]) -> IResult<&[u8], f32> {
    // `f32::from_bits` performs the same bit-for-bit reinterpretation as the
    // previous `unsafe { transmute::<u32, f32>(..) }` but without `unsafe`
    // (stable since Rust 1.20).
    match be_u32(input) {
        Err(e) => Err(e),
        Ok((i, o)) => Ok((i, f32::from_bits(o))),
    }
}
/// Recognizes big endian 8 bytes floating point number
#[inline]
pub fn be_f64(input: &[u8]) -> IResult<&[u8], f64> {
    match be_u64(input) {
        Err(e) => Err(e),
        Ok((i, o)) => Ok((i, f64::from_bits(o))),
    }
}
/// Recognizes little endian 4 bytes floating point number
#[inline]
pub fn le_f32(input: &[u8]) -> IResult<&[u8], f32> {
    match le_u32(input) {
        Err(e) => Err(e),
        Ok((i, o)) => Ok((i, f32::from_bits(o))),
    }
}
/// Recognizes little endian 8 bytes floating point number
#[inline]
pub fn le_f64(input: &[u8]) -> IResult<&[u8], f64> {
    match le_u64(input) {
        Err(e) => Err(e),
        Ok((i, o)) => Ok((i, f64::from_bits(o))),
    }
}
/// Recognizes a hex-encoded integer
///
/// Consumes up to 8 hex digits (8 nibbles = 32 bits) and returns their value
/// as a `u32`; any hex digits beyond the first 8 are left in the remainder.
#[inline]
pub fn hex_u32(input: &[u8]) -> IResult<&[u8], u32> {
    match is_a!(input, &b"0123456789abcdefABCDEF"[..]) {
        Err(e) => Err(e),
        Ok((i, o)) => {
            // Do not parse more than 8 characters for a u32
            let (parsed, remaining) = if o.len() <= 8 {
                (o, i)
            } else {
                (&input[..8], &input[8..])
            };
            // Walk the digits from least to most significant: digit `k`
            // (counted from the right) contributes `value << (k * 4)`.
            // `unwrap_or(0)` is unreachable in practice since `is_a!` only
            // matched valid hex characters.
            let res = parsed
                .iter()
                .rev()
                .enumerate()
                .map(|(k, &v)| {
                    let digit = v as char;
                    digit.to_digit(16).unwrap_or(0) << (k * 4)
                })
                .sum();
            Ok((remaining, res))
        }
    }
}
/// Recognizes non empty buffers: succeeds with the whole input as output
/// (and an empty remainder) unless the input has zero length.
#[inline]
pub fn non_empty<T>(input: T) -> IResult<T, T>
where
    T: Slice<Range<usize>> + Slice<RangeFrom<usize>> + Slice<RangeTo<usize>>,
    T: InputLength + AtEof,
{
    // Guard clause: empty input is reported as NonEmpty / needs-more.
    if input.input_len() == 0 {
        return need_more_err(input, Needed::Unknown, ErrorKind::NonEmpty::<u32>);
    }
    let len = input.input_len();
    Ok((input.slice(len..), input))
}
/// Return the remaining input.
///
/// Consumes everything: the output is the whole input and the remainder is
/// the empty slice at its end.
#[inline]
pub fn rest<T>(input: T) -> IResult<T, T>
where
    T: Slice<Range<usize>> + Slice<RangeFrom<usize>> + Slice<RangeTo<usize>>,
    T: InputLength,
{
    Ok((input.slice(input.input_len()..), input))
}
/// Return the remaining input, for strings.
///
/// Consumes everything: the output is the whole input and the remainder is
/// the empty string slice at its end.
#[inline]
pub fn rest_s(input: &str) -> IResult<&str, &str> {
    let (consumed, remaining) = input.split_at(input.len());
    Ok((remaining, consumed))
}
#[allow(unused_imports)]
#[cfg_attr(rustfmt, rustfmt_skip)]
/// Matches the textual shape of a floating point number without converting
/// it, returning the matched slice via `recognize!`. The grammar is:
/// `[+|-] ( digits [ '.' [digits] ] | '.' digits ) [ (e|E) [+|-] digits ]`
pub fn recognize_float<T>(input: T) -> IResult<T, T, u32>
where
  T: Slice<Range<usize>> + Slice<RangeFrom<usize>> + Slice<RangeTo<usize>>,
  T: Clone + Offset,
  T: InputIter + AtEof,
  <T as InputIter>::Item: AsChar,
  T: InputTakeAtPosition,
  <T as InputTakeAtPosition>::Item: AsChar
{
  recognize!(input,
    tuple!(
      // optional sign
      opt!(alt!(char!('+') | char!('-'))),
      // mantissa: "digits[.digits]" or ".digits"
      alt!(
        value!((), tuple!(digit, opt!(pair!(char!('.'), opt!(digit)))))
      | value!((), tuple!(char!('.'), digit))
      ),
      // optional exponent
      opt!(tuple!(
        alt!(char!('e') | char!('E')),
        opt!(alt!(char!('+') | char!('-'))),
        digit
        )
      )
    )
  )
}
/// Recognizes floating point number in a byte string and returns a f32
#[cfg(feature = "alloc")]
pub fn float(input: &[u8]) -> IResult<&[u8], f32> {
    // Use `call!` like the sibling parsers below for consistency; the bare
    // function-name form expanded to the same invocation.
    flat_map!(input, call!(recognize_float), parse_to!(f32))
}
/// Recognizes floating point number in a string and returns a f32
#[cfg(feature = "alloc")]
pub fn float_s(input: &str) -> IResult<&str, f32> {
    flat_map!(input, call!(recognize_float), parse_to!(f32))
}
/// Recognizes floating point number in a byte string and returns a f64
#[cfg(feature = "alloc")]
pub fn double(input: &[u8]) -> IResult<&[u8], f64> {
    flat_map!(input, call!(recognize_float), parse_to!(f64))
}
/// Recognizes floating point number in a string and returns a f64
#[cfg(feature = "alloc")]
pub fn double_s(input: &str) -> IResult<&str, f64> {
    flat_map!(input, call!(recognize_float), parse_to!(f64))
}
#[cfg(test)]
mod tests {
use super::*;
use internal::{Err, IResult, Needed};
use types::{CompleteByteSlice, CompleteStr};
#[test]
#[cfg(feature = "alloc")]
fn tag_closure() {
let x = tag_cl(&b"abcd"[..]);
let r = x(&b"abcdabcdefgh"[..]);
assert_eq!(r, Ok((&b"abcdefgh"[..], &b"abcd"[..])));
let r2 = x(&b"abcefgh"[..]);
assert_eq!(
r2,
Err(Err::Error(error_position!(
&b"abcefgh"[..],
ErrorKind::TagClosure
),))
);
}
#[test]
fn character() {
let empty: &[u8] = b"";
let a: &[u8] = b"abcd";
let b: &[u8] = b"1234";
let c: &[u8] = b"a123";
let d: &[u8] = "azé12".as_bytes();
let e: &[u8] = b" ";
let f: &[u8] = b" ;";
assert_eq!(alpha(a), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
alpha(CompleteByteSlice(a)),
Ok((CompleteByteSlice(empty), CompleteByteSlice(a)))
);
assert_eq!(
alpha(b),
Err(Err::Error(error_position!(b, ErrorKind::Alpha)))
);
assert_eq!(alpha(c), Ok((&c[1..], &b"a"[..])));
assert_eq!(alpha(d), Ok(("é12".as_bytes(), &b"az"[..])));
assert_eq!(
digit(a),
Err(Err::Error(error_position!(a, ErrorKind::Digit)))
);
assert_eq!(digit(b), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
digit(CompleteByteSlice(b)),
Ok((CompleteByteSlice(empty), CompleteByteSlice(b)))
);
assert_eq!(
digit(c),
Err(Err::Error(error_position!(c, ErrorKind::Digit)))
);
assert_eq!(
digit(d),
Err(Err::Error(error_position!(d, ErrorKind::Digit)))
);
assert_eq!(hex_digit(a), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
hex_digit(CompleteByteSlice(a)),
Ok((CompleteByteSlice(empty), CompleteByteSlice(a)))
);
assert_eq!(hex_digit(b), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
hex_digit(CompleteByteSlice(b)),
Ok((CompleteByteSlice(empty), CompleteByteSlice(b)))
);
assert_eq!(hex_digit(c), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
hex_digit(CompleteByteSlice(c)),
Ok((CompleteByteSlice(empty), CompleteByteSlice(c)))
);
assert_eq!(hex_digit(d), Ok(("zé12".as_bytes(), &b"a"[..])));
assert_eq!(
hex_digit(e),
Err(Err::Error(error_position!(e, ErrorKind::HexDigit)))
);
assert_eq!(
oct_digit(a),
Err(Err::Error(error_position!(a, ErrorKind::OctDigit)))
);
assert_eq!(oct_digit(b), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
oct_digit(CompleteByteSlice(b)),
Ok((CompleteByteSlice(empty), CompleteByteSlice(b)))
);
assert_eq!(
oct_digit(c),
Err(Err::Error(error_position!(c, ErrorKind::OctDigit)))
);
assert_eq!(
oct_digit(d),
Err(Err::Error(error_position!(d, ErrorKind::OctDigit)))
);
assert_eq!(alphanumeric(a), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
alphanumeric(CompleteByteSlice(a)),
Ok((CompleteByteSlice(empty), CompleteByteSlice(a)))
);
//assert_eq!(fix_error!(b,(), alphanumeric), Ok((empty, b)));
assert_eq!(alphanumeric(c), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
alphanumeric(CompleteByteSlice(c)),
Ok((CompleteByteSlice(empty), CompleteByteSlice(c)))
);
assert_eq!(alphanumeric(d), Ok(("é12".as_bytes(), &b"az"[..])));
assert_eq!(space(e), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
space(CompleteByteSlice(e)),
Ok((CompleteByteSlice(empty), CompleteByteSlice(b" ")))
);
assert_eq!(space(f), Ok((&b";"[..], &b" "[..])));
assert_eq!(
space(CompleteByteSlice(f)),
Ok((CompleteByteSlice(b";"), CompleteByteSlice(b" ")))
);
}
#[cfg(feature = "alloc")]
#[test]
fn character_s() {
let empty = "";
let a = "abcd";
let b = "1234";
let c = "a123";
let d = "azé12";
let e = " ";
assert_eq!(alpha(a), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
alpha(CompleteStr(a)),
Ok((CompleteStr(empty), CompleteStr(a)))
);
assert_eq!(
alpha(b),
Err(Err::Error(error_position!(b, ErrorKind::Alpha)))
);
assert_eq!(alpha(c), Ok((&c[1..], &"a"[..])));
assert_eq!(alpha(d), Ok(("12", &"azé"[..])));
assert_eq!(
digit(a),
Err(Err::Error(error_position!(a, ErrorKind::Digit)))
);
assert_eq!(digit(b), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
digit(CompleteStr(b)),
Ok((CompleteStr(empty), CompleteStr(b)))
);
assert_eq!(
digit(c),
Err(Err::Error(error_position!(c, ErrorKind::Digit)))
);
assert_eq!(
digit(d),
Err(Err::Error(error_position!(d, ErrorKind::Digit)))
);
assert_eq!(hex_digit(a), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
hex_digit(CompleteStr(a)),
Ok((CompleteStr(empty), CompleteStr(a)))
);
assert_eq!(hex_digit(b), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
hex_digit(CompleteStr(b)),
Ok((CompleteStr(empty), CompleteStr(b)))
);
assert_eq!(hex_digit(c), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
hex_digit(CompleteStr(c)),
Ok((CompleteStr(empty), CompleteStr(c)))
);
assert_eq!(hex_digit(d), Ok(("zé12", &"a"[..])));
assert_eq!(
hex_digit(e),
Err(Err::Error(error_position!(e, ErrorKind::HexDigit)))
);
assert_eq!(
oct_digit(a),
Err(Err::Error(error_position!(a, ErrorKind::OctDigit)))
);
assert_eq!(oct_digit(b), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
oct_digit(CompleteStr(b)),
Ok((CompleteStr(empty), CompleteStr(b)))
);
assert_eq!(
oct_digit(c),
Err(Err::Error(error_position!(c, ErrorKind::OctDigit)))
);
assert_eq!(
oct_digit(d),
Err(Err::Error(error_position!(d, ErrorKind::OctDigit)))
);
assert_eq!(alphanumeric(a), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
alphanumeric(CompleteStr(a)),
Ok((CompleteStr(empty), CompleteStr(a)))
);
//assert_eq!(fix_error!(b,(), alphanumeric), Ok((empty, b)));
assert_eq!(alphanumeric(c), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
alphanumeric(CompleteStr(c)),
Ok((CompleteStr(empty), CompleteStr(c)))
);
assert_eq!(alphanumeric(d), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
alphanumeric(CompleteStr(d)),
Ok((CompleteStr(""), CompleteStr("azé12")))
);
assert_eq!(space(e), Err(Err::Incomplete(Needed::Size(1))));
}
use traits::Offset;
#[test]
fn offset() {
let a = &b"abcd;"[..];
let b = &b"1234;"[..];
let c = &b"a123;"[..];
let d = &b" \t;"[..];
let e = &b" \t\r\n;"[..];
let f = &b"123abcDEF;"[..];
match alpha(a) {
Ok((i, _)) => {
assert_eq!(a.offset(i) + i.len(), a.len());
}
_ => panic!("wrong return type in offset test for alpha"),
}
match digit(b) {
Ok((i, _)) => {
assert_eq!(b.offset(i) + i.len(), b.len());
}
_ => panic!("wrong return type in offset test for digit"),
}
match alphanumeric(c) {
Ok((i, _)) => {
assert_eq!(c.offset(i) + i.len(), c.len());
}
_ => panic!("wrong return type in offset test for alphanumeric"),
}
match space(d) {
Ok((i, _)) => {
assert_eq!(d.offset(i) + i.len(), d.len());
}
_ => panic!("wrong return type in offset test for space"),
}
match multispace(e) {
Ok((i, _)) => {
assert_eq!(e.offset(i) + i.len(), e.len());
}
_ => panic!("wrong return type in offset test for multispace"),
}
match hex_digit(f) {
Ok((i, _)) => {
assert_eq!(f.offset(i) + i.len(), f.len());
}
_ => panic!("wrong return type in offset test for hex_digit"),
}
match oct_digit(f) {
Ok((i, _)) => {
assert_eq!(f.offset(i) + i.len(), f.len());
}
_ => panic!("wrong return type in offset test for oct_digit"),
}
}
#[test]
fn is_not_line_ending_bytes() {
// Everything before the first '\n' is returned; the '\n' stays in the remainder.
let a: &[u8] = b"ab12cd\nefgh";
assert_eq!(not_line_ending(a), Ok((&b"\nefgh"[..], &b"ab12cd"[..])));
let b: &[u8] = b"ab12cd\nefgh\nijkl";
assert_eq!(
not_line_ending(b),
Ok((&b"\nefgh\nijkl"[..], &b"ab12cd"[..]))
);
// "\r\n" also counts as a line ending and is left in the remainder.
let c: &[u8] = b"ab12cd\r\nefgh\nijkl";
assert_eq!(
not_line_ending(c),
Ok((&b"\r\nefgh\nijkl"[..], &b"ab12cd"[..]))
);
// CompleteByteSlice input is treated as complete, so the whole input matches...
let d = CompleteByteSlice(b"ab12cd");
assert_eq!(not_line_ending(d), Ok((CompleteByteSlice(b""), d)));
// ...while a plain slice with no line ending is Incomplete (streaming mode).
let d: &[u8] = b"ab12cd";
assert_eq!(not_line_ending(d), Err(Err::Incomplete(Needed::Unknown)));
}
#[test]
fn is_not_line_ending_str() {
/*
let a: &str = "ab12cd\nefgh";
assert_eq!(not_line_ending(a), Ok((&"\nefgh"[..], &"ab12cd"[..])));
let b: &str = "ab12cd\nefgh\nijkl";
assert_eq!(not_line_ending(b), Ok((&"\nefgh\nijkl"[..], &"ab12cd"[..])));
let c: &str = "ab12cd\r\nefgh\nijkl";
assert_eq!(not_line_ending(c), Ok((&"\r\nefgh\nijkl"[..], &"ab12cd"[..])));
let d = "βèƒôřè\nÂßÇáƒƭèř";
assert_eq!(not_line_ending(d), Ok((&"\nÂßÇáƒƭèř"[..], &"βèƒôřè"[..])));
let e = "βèƒôřè\r\nÂßÇáƒƭèř";
assert_eq!(not_line_ending(e), Ok((&"\r\nÂßÇáƒƭèř"[..], &"βèƒôřè"[..])));
*/
// A lone '\r' not followed by '\n' is not a valid line ending: Tag error.
let f = "βèƒôřè\rÂßÇáƒƭèř";
assert_eq!(
not_line_ending(f),
Err(Err::Error(error_position!(f, ErrorKind::Tag)))
);
let g = CompleteStr("ab12cd");
assert_eq!(not_line_ending(g), Ok((CompleteStr(""), g)));
let g2: &str = "ab12cd";
assert_eq!(not_line_ending(g2), Err(Err::Incomplete(Needed::Unknown)));
}
#[test]
#[cfg(feature = "alloc")]
fn buffer_with_size() {
use lib::std::vec::Vec;
// sized_buffer reads the one-byte length prefix (3), returns the next 3
// bytes [4,5,6] as output and [7,8] as the remaining input.
let i: Vec<u8> = vec![7, 8];
let o: Vec<u8> = vec![4, 5, 6];
//let arr:[u8; 6usize] = [3, 4, 5, 6, 7, 8];
let arr: [u8; 6usize] = [3, 4, 5, 6, 7, 8];
let res = sized_buffer(&arr[..]);
assert_eq!(res, Ok((&i[..], &o[..])))
}
/*#[test]
fn t1() {
let v1:Vec<u8> = vec![1,2,3];
let v2:Vec<u8> = vec![4,5,6];
let d = Ok((&v1[..], &v2[..]));
let res = d.flat_map(print);
assert_eq!(res, Ok((&v2[..], ())));
}*/
// The be_* / le_* integer tests below pin exact byte layouts, each covering
// zero, the maximum positive value, -1 (all bits set) and the minimum value.
#[test]
fn i8_tests() {
assert_eq!(be_i8(&[0x00]), Ok((&b""[..], 0)));
assert_eq!(be_i8(&[0x7f]), Ok((&b""[..], 127)));
assert_eq!(be_i8(&[0xff]), Ok((&b""[..], -1)));
assert_eq!(be_i8(&[0x80]), Ok((&b""[..], -128)));
}
#[test]
fn i16_tests() {
assert_eq!(be_i16(&[0x00, 0x00]), Ok((&b""[..], 0)));
assert_eq!(be_i16(&[0x7f, 0xff]), Ok((&b""[..], 32_767_i16)));
assert_eq!(be_i16(&[0xff, 0xff]), Ok((&b""[..], -1)));
assert_eq!(be_i16(&[0x80, 0x00]), Ok((&b""[..], -32_768_i16)));
}
#[test]
fn u24_tests() {
assert_eq!(be_u24(&[0x00, 0x00, 0x00]), Ok((&b""[..], 0)));
assert_eq!(be_u24(&[0x00, 0xFF, 0xFF]), Ok((&b""[..], 65_535_u32)));
assert_eq!(be_u24(&[0x12, 0x34, 0x56]), Ok((&b""[..], 1_193_046_u32)));
}
#[test]
fn i24_tests() {
// 24-bit values must be sign-extended into the i32 result.
assert_eq!(be_i24(&[0xFF, 0xFF, 0xFF]), Ok((&b""[..], -1_i32)));
assert_eq!(be_i24(&[0xFF, 0x00, 0x00]), Ok((&b""[..], -65_536_i32)));
assert_eq!(be_i24(&[0xED, 0xCB, 0xAA]), Ok((&b""[..], -1_193_046_i32)));
}
#[test]
fn i32_tests() {
assert_eq!(be_i32(&[0x00, 0x00, 0x00, 0x00]), Ok((&b""[..], 0)));
assert_eq!(
be_i32(&[0x7f, 0xff, 0xff, 0xff]),
Ok((&b""[..], 2_147_483_647_i32))
);
assert_eq!(be_i32(&[0xff, 0xff, 0xff, 0xff]), Ok((&b""[..], -1)));
assert_eq!(
be_i32(&[0x80, 0x00, 0x00, 0x00]),
Ok((&b""[..], -2_147_483_648_i32))
);
}
#[test]
fn i64_tests() {
assert_eq!(
be_i64(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]),
Ok((&b""[..], 0))
);
assert_eq!(
be_i64(&[0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]),
Ok((&b""[..], 9_223_372_036_854_775_807_i64))
);
assert_eq!(
be_i64(&[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]),
Ok((&b""[..], -1))
);
assert_eq!(
be_i64(&[0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]),
Ok((&b""[..], -9_223_372_036_854_775_808_i64))
);
}
#[test]
fn le_i8_tests() {
// Single bytes have no endianness; le_i8 must behave exactly like be_i8.
assert_eq!(le_i8(&[0x00]), Ok((&b""[..], 0)));
assert_eq!(le_i8(&[0x7f]), Ok((&b""[..], 127)));
assert_eq!(le_i8(&[0xff]), Ok((&b""[..], -1)));
assert_eq!(le_i8(&[0x80]), Ok((&b""[..], -128)));
}
#[test]
fn le_i16_tests() {
// Same values as i16_tests, with the byte order reversed.
assert_eq!(le_i16(&[0x00, 0x00]), Ok((&b""[..], 0)));
assert_eq!(le_i16(&[0xff, 0x7f]), Ok((&b""[..], 32_767_i16)));
assert_eq!(le_i16(&[0xff, 0xff]), Ok((&b""[..], -1)));
assert_eq!(le_i16(&[0x00, 0x80]), Ok((&b""[..], -32_768_i16)));
}
#[test]
fn le_u24_tests() {
assert_eq!(le_u24(&[0x00, 0x00, 0x00]), Ok((&b""[..], 0)));
assert_eq!(le_u24(&[0xFF, 0xFF, 0x00]), Ok((&b""[..], 65_535_u32)));
assert_eq!(le_u24(&[0x56, 0x34, 0x12]), Ok((&b""[..], 1_193_046_u32)));
}
#[test]
fn le_i24_tests() {
assert_eq!(le_i24(&[0xFF, 0xFF, 0xFF]), Ok((&b""[..], -1_i32)));
assert_eq!(le_i24(&[0x00, 0x00, 0xFF]), Ok((&b""[..], -65_536_i32)));
assert_eq!(le_i24(&[0xAA, 0xCB, 0xED]), Ok((&b""[..], -1_193_046_i32)));
}
#[test]
fn le_i32_tests() {
assert_eq!(le_i32(&[0x00, 0x00, 0x00, 0x00]), Ok((&b""[..], 0)));
assert_eq!(
le_i32(&[0xff, 0xff, 0xff, 0x7f]),
Ok((&b""[..], 2_147_483_647_i32))
);
assert_eq!(le_i32(&[0xff, 0xff, 0xff, 0xff]), Ok((&b""[..], -1)));
assert_eq!(
le_i32(&[0x00, 0x00, 0x00, 0x80]),
Ok((&b""[..], -2_147_483_648_i32))
);
}
#[test]
fn le_i64_tests() {
assert_eq!(
le_i64(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]),
Ok((&b""[..], 0))
);
assert_eq!(
le_i64(&[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f]),
Ok((&b""[..], 9_223_372_036_854_775_807_i64))
);
assert_eq!(
le_i64(&[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]),
Ok((&b""[..], -1))
);
assert_eq!(
le_i64(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80]),
Ok((&b""[..], -9_223_372_036_854_775_808_i64))
);
}
#[test]
fn be_f32_tests() {
// 0x4d311fd8 is the IEEE-754 single-precision encoding of 185728392.0.
assert_eq!(be_f32(&[0x00, 0x00, 0x00, 0x00]), Ok((&b""[..], 0_f32)));
assert_eq!(
be_f32(&[0x4d, 0x31, 0x1f, 0xd8]),
Ok((&b""[..], 185_728_392_f32))
);
}
#[test]
fn be_f64_tests() {
assert_eq!(
be_f64(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]),
Ok((&b""[..], 0_f64))
);
assert_eq!(
be_f64(&[0x41, 0xa6, 0x23, 0xfb, 0x10, 0x00, 0x00, 0x00]),
Ok((&b""[..], 185_728_392_f64))
);
}
#[test]
fn le_f32_tests() {
// Same bit patterns as the be_f32 tests, with the byte order reversed.
assert_eq!(le_f32(&[0x00, 0x00, 0x00, 0x00]), Ok((&b""[..], 0_f32)));
assert_eq!(
le_f32(&[0xd8, 0x1f, 0x31, 0x4d]),
Ok((&b""[..], 185_728_392_f32))
);
}
#[test]
fn le_f64_tests() {
assert_eq!(
le_f64(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]),
Ok((&b""[..], 0_f64))
);
assert_eq!(
le_f64(&[0x00, 0x00, 0x00, 0x10, 0xfb, 0x23, 0xa6, 0x41]),
Ok((&b""[..], 185_728_392_f64))
);
}
#[test]
fn hex_u32_tests() {
// No hex digits at all is an error (is_a! requires at least one match).
assert_eq!(
hex_u32(&b";"[..]),
Err(Err::Error(error_position!(&b";"[..], ErrorKind::IsA)))
);
assert_eq!(hex_u32(&b"ff;"[..]), Ok((&b";"[..], 255)));
assert_eq!(hex_u32(&b"1be2;"[..]), Ok((&b";"[..], 7138)));
assert_eq!(hex_u32(&b"c5a31be2;"[..]), Ok((&b";"[..], 3_315_801_058)));
// Parsing is case-insensitive.
assert_eq!(hex_u32(&b"C5A31be2;"[..]), Ok((&b";"[..], 3_315_801_058)));
// Only the first 8 hex digits are consumed; the rest stays in the input.
assert_eq!(hex_u32(&b"00c5a31be2;"[..]), Ok((&b"e2;"[..], 12_952_347)));
assert_eq!(
hex_u32(&b"c5a31be201;"[..]),
Ok((&b"01;"[..], 3_315_801_058))
);
assert_eq!(hex_u32(&b"ffffffff;"[..]), Ok((&b";"[..], 4_294_967_295)));
// A "0x" prefix is not understood: parsing stops at the 'x'.
assert_eq!(hex_u32(&b"0x1be2;"[..]), Ok((&b"x1be2;"[..], 0)));
}
/*
#[test]
fn end_of_input() {
let not_over = &b"Hello, world!"[..];
let is_over = &b""[..];
named!(eof_test, eof!());
let res_not_over = eof_test(not_over);
assert_eq!(res_not_over, Err(Err::Error(error_position!(not_over, ErrorKind::Eof))));
let res_over = eof_test(is_over);
assert_eq!(res_over, Ok((is_over, is_over)));
}
*/
#[test]
fn rest_on_slices() {
// `rest` always succeeds, returning the whole input as output.
let input: &[u8] = &b"Hello, world!"[..];
let empty: &[u8] = &b""[..];
assert_eq!(rest(input), Ok((empty, input)));
}
#[test]
fn rest_on_strs() {
let input: &str = "Hello, world!";
let empty: &str = "";
assert_eq!(rest(input), Ok((empty, input)));
}
#[test]
fn configurable_endianness() {
// The u16!/u32!/u64!/i16!/i32!/i64! macros pick the be_* or le_* parser
// at runtime depending on the Endianness argument; the same bytes must
// therefore decode to different values for Big vs Little.
named!(be_tst16<u16>, u16!(Endianness::Big));
named!(le_tst16<u16>, u16!(Endianness::Little));
assert_eq!(be_tst16(&[0x80, 0x00]), Ok((&b""[..], 32_768_u16)));
assert_eq!(le_tst16(&[0x80, 0x00]), Ok((&b""[..], 128_u16)));
named!(be_tst32<u32>, u32!(Endianness::Big));
named!(le_tst32<u32>, u32!(Endianness::Little));
assert_eq!(
be_tst32(&[0x12, 0x00, 0x60, 0x00]),
Ok((&b""[..], 302_014_464_u32))
);
assert_eq!(
le_tst32(&[0x12, 0x00, 0x60, 0x00]),
Ok((&b""[..], 6_291_474_u32))
);
named!(be_tst64<u64>, u64!(Endianness::Big));
named!(le_tst64<u64>, u64!(Endianness::Little));
assert_eq!(
be_tst64(&[0x12, 0x00, 0x60, 0x00, 0x12, 0x00, 0x80, 0x00]),
Ok((&b""[..], 1_297_142_246_100_992_000_u64))
);
assert_eq!(
le_tst64(&[0x12, 0x00, 0x60, 0x00, 0x12, 0x00, 0x80, 0x00]),
Ok((&b""[..], 36_028_874_334_666_770_u64))
);
named!(be_tsti16<i16>, i16!(Endianness::Big));
named!(le_tsti16<i16>, i16!(Endianness::Little));
assert_eq!(be_tsti16(&[0x00, 0x80]), Ok((&b""[..], 128_i16)));
assert_eq!(le_tsti16(&[0x00, 0x80]), Ok((&b""[..], -32_768_i16)));
named!(be_tsti32<i32>, i32!(Endianness::Big));
named!(le_tsti32<i32>, i32!(Endianness::Little));
assert_eq!(
be_tsti32(&[0x00, 0x12, 0x60, 0x00]),
Ok((&b""[..], 1_204_224_i32))
);
assert_eq!(
le_tsti32(&[0x00, 0x12, 0x60, 0x00]),
Ok((&b""[..], 6_296_064_i32))
);
named!(be_tsti64<i64>, i64!(Endianness::Big));
named!(le_tsti64<i64>, i64!(Endianness::Little));
assert_eq!(
be_tsti64(&[0x00, 0xFF, 0x60, 0x00, 0x12, 0x00, 0x80, 0x00]),
Ok((&b""[..], 71_881_672_479_506_432_i64))
);
assert_eq!(
le_tsti64(&[0x00, 0xFF, 0x60, 0x00, 0x12, 0x00, 0x80, 0x00]),
Ok((&b""[..], 36_028_874_334_732_032_i64))
);
}
#[test]
#[cfg(feature = "std")]
fn manual_configurable_endianness_test() {
// Selecting a parser at runtime through a boxed closure; x == 1 so the
// little endian branch is taken.
let x = 1;
let int_parse: Box<Fn(&[u8]) -> IResult<&[u8], u16>> = if x == 2 {
Box::new(be_u16)
} else {
Box::new(le_u16)
};
println!("{:?}", int_parse(&b"3"[..]));
assert_eq!(int_parse(&[0x80, 0x00]), Ok((&b""[..], 128_u16)));
}
use lib::std::convert::From;
// fix_error! converts the default u32 error into CustomError through this
// From impl; the payload is discarded.
impl From<u32> for CustomError {
fn from(_: u32) -> Self {
CustomError
}
}
// Zero-sized marker error type used to exercise fix_error!.
struct CustomError;
#[allow(dead_code)]
// Demonstrates adapting a u32-error parser to a custom error type.
fn custom_error(input: &[u8]) -> IResult<&[u8], &[u8], CustomError> {
fix_error!(input, CustomError, alphanumeric)
}
#[test]
fn hex_digit_test() {
// All hex digits (both cases) are consumed up to the ';'.
let i = &b"0123456789abcdefABCDEF;"[..];
assert_eq!(hex_digit(i), Ok((&b";"[..], &i[..i.len() - 1])));
// 'g'/'G' are just past 'f'/'F' and must be rejected.
let i = &b"g"[..];
assert_eq!(
hex_digit(i),
Err(Err::Error(error_position!(i, ErrorKind::HexDigit)))
);
let i = &b"G"[..];
assert_eq!(
hex_digit(i),
Err(Err::Error(error_position!(i, ErrorKind::HexDigit)))
);
assert!(is_hex_digit(b'0'));
assert!(is_hex_digit(b'9'));
assert!(is_hex_digit(b'a'));
assert!(is_hex_digit(b'f'));
assert!(is_hex_digit(b'A'));
assert!(is_hex_digit(b'F'));
assert!(!is_hex_digit(b'g'));
assert!(!is_hex_digit(b'G'));
// Boundary characters around the digit/letter ASCII ranges.
assert!(!is_hex_digit(b'/'));
assert!(!is_hex_digit(b':'));
assert!(!is_hex_digit(b'@'));
assert!(!is_hex_digit(b'\x60'));
}
#[test]
fn oct_digit_test() {
let i = &b"01234567;"[..];
assert_eq!(oct_digit(i), Ok((&b";"[..], &i[..i.len() - 1])));
// '8' is the first non-octal digit.
let i = &b"8"[..];
assert_eq!(
oct_digit(i),
Err(Err::Error(error_position!(i, ErrorKind::OctDigit)))
);
assert!(is_oct_digit(b'0'));
assert!(is_oct_digit(b'7'));
assert!(!is_oct_digit(b'8'));
assert!(!is_oct_digit(b'9'));
assert!(!is_oct_digit(b'a'));
assert!(!is_oct_digit(b'A'));
assert!(!is_oct_digit(b'/'));
assert!(!is_oct_digit(b':'));
assert!(!is_oct_digit(b'@'));
assert!(!is_oct_digit(b'\x60'));
}
#[test]
fn full_line_windows() {
// A full line is the text before the line ending plus the "\r\n" itself.
named!(
take_full_line<(&[u8], &[u8])>,
tuple!(not_line_ending, line_ending)
);
let input = b"abc\r\n";
let output = take_full_line(input);
assert_eq!(output, Ok((&b""[..], (&b"abc"[..], &b"\r\n"[..]))));
}
#[test]
fn full_line_unix() {
named!(
take_full_line<(&[u8], &[u8])>,
tuple!(not_line_ending, line_ending)
);
let input = b"abc\n";
let output = take_full_line(input);
assert_eq!(output, Ok((&b""[..], (&b"abc"[..], &b"\n"[..]))));
}
#[test]
fn check_windows_lineending() {
// line_ending must consume the two-byte "\r\n" as a unit.
let input = b"\r\n";
let output = line_ending(&input[..]);
assert_eq!(output, Ok((&b""[..], &b"\r\n"[..])));
}
#[test]
fn check_unix_lineending() {
let input = b"\n";
let output = line_ending(&input[..]);
assert_eq!(output, Ok((&b""[..], &b"\n"[..])));
}
#[test]
fn cr_lf() {
// crlf only accepts the exact "\r\n" pair; a lone '\r' is Incomplete
// (more input might complete it) and '\r' + other data is an error.
assert_eq!(crlf(&b"\r\na"[..]), Ok((&b"a"[..], &b"\r\n"[..])));
assert_eq!(crlf(&b"\r"[..]), Err(Err::Incomplete(Needed::Size(2))));
assert_eq!(
crlf(&b"\ra"[..]),
Err(Err::Error(error_position!(&b"\ra"[..], ErrorKind::CrLf)))
);
// Same behavior on &str input.
assert_eq!(crlf("\r\na"), Ok(("a", "\r\n")));
assert_eq!(crlf("\r"), Err(Err::Incomplete(Needed::Size(2))));
assert_eq!(
crlf("\ra"),
Err(Err::Error(error_position!("\ra", ErrorKind::CrLf)))
);
}
#[test]
fn end_of_line() {
// eol accepts either "\n" or "\r\n" and mirrors crlf's failure modes.
assert_eq!(eol(&b"\na"[..]), Ok((&b"a"[..], &b"\n"[..])));
assert_eq!(eol(&b"\r\na"[..]), Ok((&b"a"[..], &b"\r\n"[..])));
assert_eq!(eol(&b"\r"[..]), Err(Err::Incomplete(Needed::Size(2))));
assert_eq!(
eol(&b"\ra"[..]),
Err(Err::Error(error_position!(&b"\ra"[..], ErrorKind::CrLf)))
);
assert_eq!(eol("\na"), Ok(("a", "\n")));
assert_eq!(eol("\r\na"), Ok(("a", "\r\n")));
assert_eq!(eol("\r"), Err(Err::Incomplete(Needed::Size(2))));
assert_eq!(
eol("\ra"),
Err(Err::Error(error_position!("\ra", ErrorKind::CrLf)))
);
}
#[test]
#[cfg(feature = "std")]
fn float_test() {
// Table-driven check: every literal must be recognized in full and must
// parse to the same value as Rust's own f32/f64 string parser.
let mut test_cases = vec![
"+3.14",
"3.14",
"-3.14",
"0",
"0.0",
"1.",
".789",
"-.5",
"1e7",
"-1E-7",
".3e-2",
"1.e4",
"1.2e4",
"-1.234E-12",
"-1.234e-12",
];
for test in test_cases.drain(..) {
let expected32 = str::parse::<f32>(test).unwrap();
let expected64 = str::parse::<f64>(test).unwrap();
println!("now parsing: {} -> {}", test, expected32);
// On CompleteStr the whole literal is consumed.
assert_eq!(
recognize_float(CompleteStr(test)),
Ok((CompleteStr(""), CompleteStr(test)))
);
// With a trailing ';' the recognizer stops at the separator.
let larger = format!("{};", test);
assert_eq!(recognize_float(&larger[..]), Ok((";", test)));
assert_eq!(float(larger.as_bytes()), Ok((&b";"[..], expected32)));
assert_eq!(float_s(&larger[..]), Ok((";", expected32)));
assert_eq!(double(larger.as_bytes()), Ok((&b";"[..], expected64)));
assert_eq!(double_s(&larger[..]), Ok((";", expected64)));
}
// A dangling exponent is Incomplete on streaming input: the digits may
// still be coming.
let remaining_exponent = "-1.234E-";
assert_eq!(
recognize_float(remaining_exponent),
Err(Err::Incomplete(Needed::Size(1)))
);
}
#[allow(dead_code)]
/// Matches either the end of input or an end-of-line sequence on `CompleteStr`.
pub fn end_of_line_completestr(input: CompleteStr) -> IResult<CompleteStr, CompleteStr> {
alt!(input, eof!() | eol)
}
}
// (stray text, possibly a commit message: "completely import Endianness" — kept as a comment so the file stays valid)
//! Useful parser combinators
//!
//! A number of useful parser combinators have already been implemented.
//! Some of them use macros, other are implemented through functions.
//! Hopefully, the syntax will converge to only one way in the future,
//! but the macros system makes no promises.
//!
#[cfg(feature = "alloc")]
use lib::std::boxed::Box;
#[cfg(feature = "std")]
use lib::std::fmt::Debug;
use internal::*;
use traits::{AsChar, InputIter, InputLength, InputTakeAtPosition};
use traits::{need_more, need_more_err, AtEof};
use lib::std::ops::{Range, RangeFrom, RangeTo};
use traits::{Compare, CompareResult, Offset, Slice};
use util::ErrorKind;
use lib::std::mem::transmute;
#[cfg(feature = "alloc")]
#[inline]
/// Returns a closure that recognizes the byte slice `rec` at the start of
/// its input, producing `(remaining, matched)` on success and a
/// `TagClosure` error otherwise.
pub fn tag_cl<'a, 'b>(rec: &'a [u8]) -> Box<Fn(&'b [u8]) -> IResult<&'b [u8], &'b [u8]> + 'a> {
  Box::new(move |i: &'b [u8]| -> IResult<&'b [u8], &'b [u8]> {
    // `starts_with` replaces the manual length check + subslice compare.
    if i.starts_with(rec) {
      let (tag, rest) = i.split_at(rec.len());
      Ok((rest, tag))
    } else {
      let e: ErrorKind<u32> = ErrorKind::TagClosure;
      Err(Err::Error(error_position!(i, e)))
    }
  })
}
#[cfg(feature = "std")]
#[inline]
/// Debug helper: prints the input with `{:?}` and passes it through
/// unchanged, producing `()` as the parsed value.
pub fn print<T: Debug>(input: T) -> IResult<T, ()> {
println!("{:?}", input);
Ok((input, ()))
}
#[inline]
/// Always succeeds, returning the entire input as the parsed value with a
/// unit `()` as the "remaining input".
pub fn begin(input: &[u8]) -> IResult<(), &[u8]> {
Ok(((), input))
}
/// Recognizes the exact two-character sequence "\r\n".
///
/// Returns Incomplete if the input is a strict prefix of "\r\n", and a
/// `CrLf` error if it starts with anything else.
pub fn crlf<T>(input: T) -> IResult<T, T>
where
T: Slice<Range<usize>> + Slice<RangeFrom<usize>> + Slice<RangeTo<usize>>,
T: InputIter + AtEof,
T: Compare<&'static str>,
{
match input.compare("\r\n") {
//FIXME: is this the right index?
CompareResult::Ok => Ok((input.slice(2..), input.slice(0..2))),
CompareResult::Incomplete => need_more_err(input, Needed::Size(2), ErrorKind::CrLf),
CompareResult::Error => {
let e: ErrorKind<u32> = ErrorKind::CrLf;
Err(Err::Error(error_position!(input, e)))
}
}
}
// FIXME: when rust-lang/rust#17436 is fixed, macros will be able to export
// public methods
/// Returns the longest input prefix containing neither '\r' nor '\n'.
///
/// A lone '\r' not followed by '\n' is rejected with a `Tag` error. If no
/// line ending is present, the whole input is returned at EOF; otherwise
/// the parser asks for more data (streaming behavior).
pub fn not_line_ending<T>(input: T) -> IResult<T, T>
where
T: Slice<Range<usize>> + Slice<RangeFrom<usize>> + Slice<RangeTo<usize>>,
T: InputIter + InputLength + AtEof,
T: Compare<&'static str>,
<T as InputIter>::Item: AsChar,
<T as InputIter>::RawItem: AsChar,
{
// Locate the first carriage return or line feed.
match input.position(|item| {
let c = item.as_char();
c == '\r' || c == '\n'
}) {
None => {
// No line ending in sight: complete input matches entirely, streaming
// input may simply not have received the line ending yet.
if input.at_eof() {
Ok((input.slice(input.input_len()..), input))
} else {
Err(Err::Incomplete(Needed::Unknown))
}
}
Some(index) => {
let mut it = input.slice(index..).iter_elements();
let nth = it.next().unwrap().as_char();
if nth == '\r' {
// A '\r' only counts as a line ending when followed by '\n'.
let sliced = input.slice(index..);
let comp = sliced.compare("\r\n");
match comp {
//FIXME: calculate the right index
CompareResult::Incomplete => need_more_err(input, Needed::Unknown, ErrorKind::Tag),
CompareResult::Error => {
let e: ErrorKind<u32> = ErrorKind::Tag;
Err(Err::Error(error_position!(input, e)))
}
CompareResult::Ok => Ok((input.slice(index..), input.slice(..index))),
}
} else {
// Plain '\n': split just before it, leaving it in the remainder.
Ok((input.slice(index..), input.slice(..index)))
}
}
}
}
/// Recognizes an end of line (both '\n' and '\r\n')
pub fn line_ending<T>(input: T) -> IResult<T, T>
where
T: Slice<Range<usize>> + Slice<RangeFrom<usize>> + Slice<RangeTo<usize>>,
T: InputIter + InputLength + AtEof,
T: Compare<&'static str>,
{
// Try the one-byte "\n" first, then fall back to the two-byte "\r\n".
match input.compare("\n") {
CompareResult::Ok => Ok((input.slice(1..), input.slice(0..1))),
CompareResult::Incomplete => need_more_err(input, Needed::Size(1), ErrorKind::CrLf::<u32>),
CompareResult::Error => {
match input.compare("\r\n") {
//FIXME: is this the right index?
CompareResult::Ok => Ok((input.slice(2..), input.slice(0..2))),
CompareResult::Incomplete => need_more_err(input, Needed::Size(2), ErrorKind::CrLf::<u32>),
CompareResult::Error => Err(Err::Error(error_position!(input, ErrorKind::CrLf::<u32>))),
}
}
}
}
/// Alias for `line_ending`: recognizes both '\n' and '\r\n'.
pub fn eol<T>(input: T) -> IResult<T, T>
where
T: Slice<Range<usize>> + Slice<RangeFrom<usize>> + Slice<RangeTo<usize>>,
T: InputIter + InputLength + AtEof,
T: Compare<&'static str>,
{
line_ending(input)
}
/// Tests if byte is ASCII alphabetic: A-Z, a-z
#[inline]
pub fn is_alphabetic(chr: u8) -> bool {
  // Byte literals instead of hex codes: 0x41..=0x5A is 'A'..='Z',
  // 0x61..=0x7A is 'a'..='z'.
  (chr >= b'A' && chr <= b'Z') || (chr >= b'a' && chr <= b'z')
}
/// Tests if byte is ASCII digit: 0-9
#[inline]
pub fn is_digit(chr: u8) -> bool {
  // Byte literals: 0x30..=0x39 is '0'..='9'.
  chr >= b'0' && chr <= b'9'
}
/// Tests if byte is ASCII hex digit: 0-9, A-F, a-f
#[inline]
pub fn is_hex_digit(chr: u8) -> bool {
  // Byte literals: decimal digits plus the first six letters, both cases.
  (chr >= b'0' && chr <= b'9') || (chr >= b'A' && chr <= b'F') || (chr >= b'a' && chr <= b'f')
}
/// Tests if byte is ASCII octal digit: 0-7
#[inline]
pub fn is_oct_digit(chr: u8) -> bool {
  // Byte literals: 0x30..=0x37 is '0'..='7'.
  chr >= b'0' && chr <= b'7'
}
/// Tests if byte is ASCII alphanumeric: A-Z, a-z, 0-9
#[inline]
pub fn is_alphanumeric(chr: u8) -> bool {
  // Digits are checked first; the two predicates are disjoint so the
  // overall result is unchanged.
  is_digit(chr) || is_alphabetic(chr)
}
/// Tests if byte is ASCII space or tab
#[inline]
pub fn is_space(chr: u8) -> bool {
  // Pattern match over the two accepted bytes.
  match chr {
    b' ' | b'\t' => true,
    _ => false,
  }
}
// FIXME: when rust-lang/rust#17436 is fixed, macros will be able to export
//pub filter!(alpha is_alphabetic)
//pub filter!(digit is_digit)
//pub filter!(hex_digit is_hex_digit)
//pub filter!(oct_digit is_oct_digit)
//pub filter!(alphanumeric is_alphanumeric)
/// Recognizes one or more lowercase and uppercase alphabetic characters: a-zA-Z
///
/// Equivalent to `alpha1`.
pub fn alpha<T>(input: T) -> IResult<T, T, u32>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar,
{
alpha1(input)
}
/// Recognizes zero or more lowercase and uppercase alphabetic characters: a-zA-Z
pub fn alpha0<T>(input: T) -> IResult<T, T, u32>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar,
{
// Split at the first non-alphabetic item; an empty match is accepted.
input.split_at_position(|item| !item.is_alpha())
}
/// Recognizes one or more lowercase and uppercase alphabetic characters: a-zA-Z
pub fn alpha1<T>(input: T) -> IResult<T, T, u32>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar,
{
// split_at_position1 errors with ErrorKind::Alpha when nothing matches.
input.split_at_position1(|item| !item.is_alpha(), ErrorKind::Alpha)
}
/// Recognizes one or more numerical characters: 0-9
///
/// Equivalent to `digit1`.
pub fn digit<T>(input: T) -> IResult<T, T>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar,
{
digit1(input)
}
/// Recognizes zero or more numerical characters: 0-9
pub fn digit0<T>(input: T) -> IResult<T, T>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar,
{
input.split_at_position(|item| !item.is_dec_digit())
}
/// Recognizes one or more numerical characters: 0-9
pub fn digit1<T>(input: T) -> IResult<T, T>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar,
{
input.split_at_position1(|item| !item.is_dec_digit(), ErrorKind::Digit)
}
/// Recognizes one or more hexadecimal numerical characters: 0-9, A-F, a-f
///
/// Equivalent to `hex_digit1`.
pub fn hex_digit<T>(input: T) -> IResult<T, T>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar,
{
hex_digit1(input)
}
/// Recognizes zero or more hexadecimal numerical characters: 0-9, A-F, a-f
pub fn hex_digit0<T>(input: T) -> IResult<T, T>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar,
{
input.split_at_position(|item| !item.is_hex_digit())
}
/// Recognizes one or more hexadecimal numerical characters: 0-9, A-F, a-f
pub fn hex_digit1<T>(input: T) -> IResult<T, T>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar,
{
// Errors with ErrorKind::HexDigit when the first item is not a hex digit.
input.split_at_position1(|item| !item.is_hex_digit(), ErrorKind::HexDigit)
}
/// Recognizes one or more octal characters: 0-7
///
/// Equivalent to `oct_digit1`.
pub fn oct_digit<T>(input: T) -> IResult<T, T>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar,
{
oct_digit1(input)
}
/// Recognizes zero or more octal characters: 0-7
pub fn oct_digit0<T>(input: T) -> IResult<T, T>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar,
{
input.split_at_position(|item| !item.is_oct_digit())
}
/// Recognizes one or more octal characters: 0-7
pub fn oct_digit1<T>(input: T) -> IResult<T, T>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar,
{
input.split_at_position1(|item| !item.is_oct_digit(), ErrorKind::OctDigit)
}
/// Recognizes one or more numerical and alphabetic characters: 0-9a-zA-Z
///
/// Equivalent to `alphanumeric1`.
pub fn alphanumeric<T>(input: T) -> IResult<T, T>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar,
{
alphanumeric1(input)
}
/// Recognizes zero or more numerical and alphabetic characters: 0-9a-zA-Z
pub fn alphanumeric0<T>(input: T) -> IResult<T, T>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar,
{
input.split_at_position(|item| !item.is_alphanum())
}
/// Recognizes one or more numerical and alphabetic characters: 0-9a-zA-Z
pub fn alphanumeric1<T>(input: T) -> IResult<T, T>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar,
{
input.split_at_position1(|item| !item.is_alphanum(), ErrorKind::AlphaNumeric)
}
/// Recognizes one or more spaces and tabs
///
/// Equivalent to `space1`.
pub fn space<T>(input: T) -> IResult<T, T>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar + Clone,
{
space1(input)
}
/// Recognizes zero or more spaces and tabs
pub fn space0<T>(input: T) -> IResult<T, T>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar + Clone,
{
// Stop at the first item that is neither a space nor a tab.
input.split_at_position(|item| {
let c = item.clone().as_char();
!(c == ' ' || c == '\t')
})
}
/// Recognizes one or more spaces and tabs
pub fn space1<T>(input: T) -> IResult<T, T>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar + Clone,
{
input.split_at_position1(
|item| {
let c = item.clone().as_char();
!(c == ' ' || c == '\t')
},
ErrorKind::Space,
)
}
/// Recognizes one or more spaces, tabs, carriage returns and line feeds
///
/// Equivalent to `multispace1`.
pub fn multispace<T>(input: T) -> IResult<T, T>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar + Clone,
{
multispace1(input)
}
/// Recognizes zero or more spaces, tabs, carriage returns and line feeds
pub fn multispace0<T>(input: T) -> IResult<T, T>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar + Clone,
{
// Stop at the first item outside the four whitespace characters.
input.split_at_position(|item| {
let c = item.clone().as_char();
!(c == ' ' || c == '\t' || c == '\r' || c == '\n')
})
}
/// Recognizes one or more spaces, tabs, carriage returns and line feeds
pub fn multispace1<T>(input: T) -> IResult<T, T>
where
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar + Clone,
{
input.split_at_position1(
|item| {
let c = item.clone().as_char();
!(c == ' ' || c == '\t' || c == '\r' || c == '\n')
},
ErrorKind::MultiSpace,
)
}
/// Parses a length-prefixed buffer: the first byte gives the payload
/// length, followed by that many payload bytes. Returns the payload.
pub fn sized_buffer(input: &[u8]) -> IResult<&[u8], &[u8]> {
if input.is_empty() {
return need_more(input, Needed::Unknown);
}
// The first byte is the payload length (0-255).
let len = input[0] as usize;
if input.len() >= len + 1 {
Ok((&input[len + 1..], &input[1..len + 1]))
} else {
// Need the prefix byte plus `len` payload bytes in total.
need_more(input, Needed::Size(1 + len))
}
}
/// Recognizes an unsigned 1 byte integer (equivalent to take!(1))
#[inline]
pub fn be_u8(i: &[u8]) -> IResult<&[u8], u8> {
  // `is_empty` is clearer than `len() < 1` (clippy::len_zero).
  if i.is_empty() {
    need_more(i, Needed::Size(1))
  } else {
    Ok((&i[1..], i[0]))
  }
}
/// Recognizes big endian unsigned 2 bytes integer
#[inline]
pub fn be_u16(i: &[u8]) -> IResult<&[u8], u16> {
  if i.len() < 2 {
    need_more(i, Needed::Size(2))
  } else {
    // Widen with `u16::from` and combine the disjoint bytes with `|`.
    let value = (u16::from(i[0]) << 8) | u16::from(i[1]);
    Ok((&i[2..], value))
  }
}
/// Recognizes big endian unsigned 3 byte integer
#[inline]
pub fn be_u24(i: &[u8]) -> IResult<&[u8], u32> {
  if i.len() < 3 {
    need_more(i, Needed::Size(3))
  } else {
    // Widen with `u32::from` and combine the disjoint bytes with `|`.
    let value = (u32::from(i[0]) << 16) | (u32::from(i[1]) << 8) | u32::from(i[2]);
    Ok((&i[3..], value))
  }
}
/// Recognizes big endian unsigned 4 bytes integer
#[inline]
pub fn be_u32(i: &[u8]) -> IResult<&[u8], u32> {
  if i.len() < 4 {
    need_more(i, Needed::Size(4))
  } else {
    // Widen with `u32::from` and combine the disjoint bytes with `|`.
    let value =
      (u32::from(i[0]) << 24) | (u32::from(i[1]) << 16) | (u32::from(i[2]) << 8) | u32::from(i[3]);
    Ok((&i[4..], value))
  }
}
/// Recognizes big endian unsigned 8 bytes integer
#[inline]
// Consistency fix: every sibling parser uses the default error type, and
// `be_i64` pipes this function through `map!` into a default-error
// `IResult`, so the explicit `u32` parameter was redundant noise.
pub fn be_u64(i: &[u8]) -> IResult<&[u8], u64> {
  if i.len() < 8 {
    need_more(i, Needed::Size(8))
  } else {
    // Fold the 8 bytes most-significant-first, shifting the accumulator
    // left by one byte per step.
    let res = i[..8].iter().fold(0u64, |acc, &byte| (acc << 8) | u64::from(byte));
    Ok((&i[8..], res))
  }
}
/// Recognizes a signed 1 byte integer (equivalent to take!(1))
#[inline]
pub fn be_i8(i: &[u8]) -> IResult<&[u8], i8> {
// The `as` cast reinterprets the bits, preserving two's-complement values.
map!(i, be_u8, |x| x as i8)
}
/// Recognizes big endian signed 2 bytes integer
#[inline]
pub fn be_i16(i: &[u8]) -> IResult<&[u8], i16> {
map!(i, be_u16, |x| x as i16)
}
/// Recognizes big endian signed 3 bytes integer
#[inline]
pub fn be_i24(i: &[u8]) -> IResult<&[u8], i32> {
// Same as the unsigned version but we need to sign-extend manually here
// (a 24-bit value has no native type, so bit 23 is propagated into the
// top byte before the cast).
map!(i, be_u24, |x| if x & 0x80_00_00 != 0 {
(x | 0xff_00_00_00) as i32
} else {
x as i32
})
}
/// Recognizes big endian signed 4 bytes integer
#[inline]
pub fn be_i32(i: &[u8]) -> IResult<&[u8], i32> {
map!(i, be_u32, |x| x as i32)
}
/// Recognizes big endian signed 8 bytes integer
#[inline]
pub fn be_i64(i: &[u8]) -> IResult<&[u8], i64> {
map!(i, be_u64, |x| x as i64)
}
/// Recognizes an unsigned 1 byte integer (equivalent to take!(1))
#[inline]
pub fn le_u8(i: &[u8]) -> IResult<&[u8], u8> {
  // `is_empty` is clearer than `len() < 1` (clippy::len_zero).
  if i.is_empty() {
    need_more(i, Needed::Size(1))
  } else {
    Ok((&i[1..], i[0]))
  }
}
/// Recognizes little endian unsigned 2 bytes integer
#[inline]
pub fn le_u16(i: &[u8]) -> IResult<&[u8], u16> {
  if i.len() < 2 {
    need_more(i, Needed::Size(2))
  } else {
    // Widen with `u16::from`; the second byte is the high byte.
    let value = (u16::from(i[1]) << 8) | u16::from(i[0]);
    Ok((&i[2..], value))
  }
}
/// Recognizes little endian unsigned 3 byte integer
#[inline]
pub fn le_u24(i: &[u8]) -> IResult<&[u8], u32> {
  if i.len() < 3 {
    need_more(i, Needed::Size(3))
  } else {
    // Widen with `u32::from`; later bytes are more significant.
    let value = u32::from(i[0]) | (u32::from(i[1]) << 8) | (u32::from(i[2]) << 16);
    Ok((&i[3..], value))
  }
}
/// Recognizes little endian unsigned 4 bytes integer
#[inline]
pub fn le_u32(i: &[u8]) -> IResult<&[u8], u32> {
  if i.len() < 4 {
    need_more(i, Needed::Size(4))
  } else {
    // Widen with `u32::from`; later bytes are more significant.
    let value =
      u32::from(i[0]) | (u32::from(i[1]) << 8) | (u32::from(i[2]) << 16) | (u32::from(i[3]) << 24);
    Ok((&i[4..], value))
  }
}
/// Recognizes little endian unsigned 8 bytes integer
#[inline]
pub fn le_u64(i: &[u8]) -> IResult<&[u8], u64> {
  if i.len() < 8 {
    need_more(i, Needed::Size(8))
  } else {
    // Iterate the 8 bytes in reverse (most significant last in little
    // endian), shifting the accumulator left by one byte per step.
    let res = i[..8]
      .iter()
      .rev()
      .fold(0u64, |acc, &byte| (acc << 8) | u64::from(byte));
    Ok((&i[8..], res))
  }
}
/// Recognizes a signed 1 byte integer (equivalent to take!(1))
#[inline]
pub fn le_i8(i: &[u8]) -> IResult<&[u8], i8> {
// The `as` cast reinterprets the bits, preserving two's-complement values.
map!(i, le_u8, |x| x as i8)
}
/// Recognizes little endian signed 2 bytes integer
#[inline]
pub fn le_i16(i: &[u8]) -> IResult<&[u8], i16> {
map!(i, le_u16, |x| x as i16)
}
/// Recognizes little endian signed 3 bytes integer
#[inline]
pub fn le_i24(i: &[u8]) -> IResult<&[u8], i32> {
// Same as the unsigned version but we need to sign-extend manually here
// (a 24-bit value has no native type, so bit 23 is propagated into the
// top byte before the cast).
map!(i, le_u24, |x| if x & 0x80_00_00 != 0 {
(x | 0xff_00_00_00) as i32
} else {
x as i32
})
}
/// Recognizes little endian signed 4 bytes integer
#[inline]
pub fn le_i32(i: &[u8]) -> IResult<&[u8], i32> {
map!(i, le_u32, |x| x as i32)
}
/// Recognizes little endian signed 8 bytes integer
#[inline]
pub fn le_i64(i: &[u8]) -> IResult<&[u8], i64> {
map!(i, le_u64, |x| x as i64)
}
/// Configurable endianness
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum Endianness {
/// Most significant byte first
Big,
/// Least significant byte first
Little,
}
// NOTE: the following macros are intentionally named after the primitive
// integer types; each one selects the big or little endian parser at
// runtime based on its `Endianness` argument.
/// if the parameter is nom::Endianness::Big, parse a big endian u16 integer,
/// otherwise a little endian u16 integer
#[macro_export]
macro_rules! u16 ( ($i:expr, $e:expr) => ( {if $crate::Endianness::Big == $e { be_u16($i) } else { le_u16($i) } } ););
/// if the parameter is nom::Endianness::Big, parse a big endian u32 integer,
/// otherwise a little endian u32 integer
#[macro_export]
macro_rules! u32 ( ($i:expr, $e:expr) => ( {if $crate::Endianness::Big == $e { be_u32($i) } else { le_u32($i) } } ););
/// if the parameter is nom::Endianness::Big, parse a big endian u64 integer,
/// otherwise a little endian u64 integer
#[macro_export]
macro_rules! u64 ( ($i:expr, $e:expr) => ( {if $crate::Endianness::Big == $e { be_u64($i) } else { le_u64($i) } } ););
/// if the parameter is nom::Endianness::Big, parse a big endian i16 integer,
/// otherwise a little endian i16 integer
#[macro_export]
macro_rules! i16 ( ($i:expr, $e:expr) => ( {if $crate::Endianness::Big == $e { be_i16($i) } else { le_i16($i) } } ););
/// if the parameter is nom::Endianness::Big, parse a big endian i32 integer,
/// otherwise a little endian i32 integer
#[macro_export]
macro_rules! i32 ( ($i:expr, $e:expr) => ( {if $crate::Endianness::Big == $e { be_i32($i) } else { le_i32($i) } } ););
/// if the parameter is nom::Endianness::Big, parse a big endian i64 integer,
/// otherwise a little endian i64 integer
#[macro_export]
macro_rules! i64 ( ($i:expr, $e:expr) => ( {if $crate::Endianness::Big == $e { be_i64($i) } else { le_i64($i) } } ););
/// Recognizes big endian 4 bytes floating point number
#[inline]
pub fn be_f32(input: &[u8]) -> IResult<&[u8], f32> {
  match be_u32(input) {
    Err(e) => Err(e),
    // `f32::from_bits` reinterprets the raw bits as an IEEE-754 float —
    // safe, equivalent replacement for the previous `unsafe` transmute.
    Ok((i, o)) => Ok((i, f32::from_bits(o))),
  }
}
/// Recognizes big endian 8 bytes floating point number
#[inline]
pub fn be_f64(input: &[u8]) -> IResult<&[u8], f64> {
  match be_u64(input) {
    Err(e) => Err(e),
    // Safe bit reinterpretation instead of the previous `unsafe` transmute.
    Ok((i, o)) => Ok((i, f64::from_bits(o))),
  }
}
/// Recognizes little endian 4 bytes floating point number
#[inline]
pub fn le_f32(input: &[u8]) -> IResult<&[u8], f32> {
  match le_u32(input) {
    Err(e) => Err(e),
    // Safe bit reinterpretation instead of the previous `unsafe` transmute.
    Ok((i, o)) => Ok((i, f32::from_bits(o))),
  }
}
/// Recognizes little endian 8 bytes floating point number
#[inline]
pub fn le_f64(input: &[u8]) -> IResult<&[u8], f64> {
  match le_u64(input) {
    Err(e) => Err(e),
    // Safe bit reinterpretation instead of the previous `unsafe` transmute.
    Ok((i, o)) => Ok((i, f64::from_bits(o))),
  }
}
/// Recognizes a hex-encoded integer
#[inline]
pub fn hex_u32(input: &[u8]) -> IResult<&[u8], u32> {
// `is_a!` consumes the longest run of hex digits and errors if there is none.
match is_a!(input, &b"0123456789abcdefABCDEF"[..]) {
Err(e) => Err(e),
Ok((i, o)) => {
// Do not parse more than 8 characters for a u32
let (parsed, remaining) = if o.len() <= 8 {
(o, i)
} else {
(&input[..8], &input[8..])
};
// Fold the digits from least significant (rightmost) to most
// significant; each hex digit contributes 4 bits. `unwrap_or(0)` is
// defensive only: `is_a!` already filtered out non-hex characters.
let res = parsed
.iter()
.rev()
.enumerate()
.map(|(k, &v)| {
let digit = v as char;
digit.to_digit(16).unwrap_or(0) << (k * 4)
})
.sum();
Ok((remaining, res))
}
}
}
/// Recognizes non empty buffers
///
/// On success the entire input is returned as the parsed value; an empty
/// input produces a `NonEmpty` error (or Incomplete in streaming mode).
#[inline]
pub fn non_empty<T>(input: T) -> IResult<T, T>
where
  T: Slice<Range<usize>> + Slice<RangeFrom<usize>> + Slice<RangeTo<usize>>,
  T: InputLength + AtEof,
{
  // Plain if/else expression instead of the previous `return …;` inside an
  // `if` paired with an `else` expression (clippy::needless_return).
  if input.input_len() == 0 {
    need_more_err(input, Needed::Unknown, ErrorKind::NonEmpty::<u32>)
  } else {
    Ok((input.slice(input.input_len()..), input))
  }
}
/// Return the remaining input.
///
/// Always succeeds: the produced value is the whole input, and the
/// remaining input is the empty slice at its end.
#[inline]
pub fn rest<T>(input: T) -> IResult<T, T>
where
T: Slice<Range<usize>> + Slice<RangeFrom<usize>> + Slice<RangeTo<usize>>,
T: InputLength,
{
Ok((input.slice(input.input_len()..), input))
}
/// Return the remaining input, for strings.
///
/// `&str` specialization of `rest`: always succeeds with the whole input.
#[inline]
pub fn rest_s(input: &str) -> IResult<&str, &str> {
Ok((&input[input.len()..], input))
}
#[allow(unused_imports)]
#[cfg_attr(rustfmt, rustfmt_skip)]
/// Recognizes a decimal floating point literal without converting it:
/// an optional sign, then either digits with an optional fractional part
/// or a leading '.' followed by digits, then an optional exponent
/// ('e'/'E', optional sign, digits). Returns the matched slice.
pub fn recognize_float<T>(input: T) -> IResult<T, T, u32>
where
T: Slice<Range<usize>> + Slice<RangeFrom<usize>> + Slice<RangeTo<usize>>,
T: Clone + Offset,
T: InputIter + AtEof,
<T as InputIter>::Item: AsChar,
T: InputTakeAtPosition,
<T as InputTakeAtPosition>::Item: AsChar
{
recognize!(input,
tuple!(
opt!(alt!(char!('+') | char!('-'))),
alt!(
value!((), tuple!(digit, opt!(pair!(char!('.'), opt!(digit)))))
| value!((), tuple!(char!('.'), digit))
),
opt!(tuple!(
alt!(char!('e') | char!('E')),
opt!(alt!(char!('+') | char!('-'))),
digit
)
)
)
)
}
/// Recognizes floating point number in a byte string and returns a f32
#[cfg(feature = "alloc")]
pub fn float(input: &[u8]) -> IResult<&[u8], f32> {
  // Wrapped in `call!` for consistency with `float_s`, `double` and
  // `double_s` below.
  flat_map!(input, call!(recognize_float), parse_to!(f32))
}
/// Recognizes floating point number in a string and returns a f32
#[cfg(feature = "alloc")]
pub fn float_s(input: &str) -> IResult<&str, f32> {
    // Recognize the float's text with `recognize_float`, then convert the
    // matched slice with `parse_to!`.
    flat_map!(input, call!(recognize_float), parse_to!(f32))
}
/// Recognizes floating point number in a byte string and returns a f64
#[cfg(feature = "alloc")]
pub fn double(input: &[u8]) -> IResult<&[u8], f64> {
    // Same recognition as `float`, but the conversion target is f64.
    flat_map!(input, call!(recognize_float), parse_to!(f64))
}
/// Recognizes floating point number in a string and returns a f64
#[cfg(feature = "alloc")]
pub fn double_s(input: &str) -> IResult<&str, f64> {
    // Same recognition as `float_s`, but the conversion target is f64.
    flat_map!(input, call!(recognize_float), parse_to!(f64))
}
#[cfg(test)]
mod tests {
use super::*;
use internal::{Err, IResult, Needed};
use types::{CompleteByteSlice, CompleteStr};
#[test]
#[cfg(feature = "alloc")]
fn tag_closure() {
let x = tag_cl(&b"abcd"[..]);
let r = x(&b"abcdabcdefgh"[..]);
assert_eq!(r, Ok((&b"abcdefgh"[..], &b"abcd"[..])));
let r2 = x(&b"abcefgh"[..]);
assert_eq!(
r2,
Err(Err::Error(error_position!(
&b"abcefgh"[..],
ErrorKind::TagClosure
),))
);
}
#[test]
fn character() {
let empty: &[u8] = b"";
let a: &[u8] = b"abcd";
let b: &[u8] = b"1234";
let c: &[u8] = b"a123";
let d: &[u8] = "azé12".as_bytes();
let e: &[u8] = b" ";
let f: &[u8] = b" ;";
assert_eq!(alpha(a), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
alpha(CompleteByteSlice(a)),
Ok((CompleteByteSlice(empty), CompleteByteSlice(a)))
);
assert_eq!(
alpha(b),
Err(Err::Error(error_position!(b, ErrorKind::Alpha)))
);
assert_eq!(alpha(c), Ok((&c[1..], &b"a"[..])));
assert_eq!(alpha(d), Ok(("é12".as_bytes(), &b"az"[..])));
assert_eq!(
digit(a),
Err(Err::Error(error_position!(a, ErrorKind::Digit)))
);
assert_eq!(digit(b), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
digit(CompleteByteSlice(b)),
Ok((CompleteByteSlice(empty), CompleteByteSlice(b)))
);
assert_eq!(
digit(c),
Err(Err::Error(error_position!(c, ErrorKind::Digit)))
);
assert_eq!(
digit(d),
Err(Err::Error(error_position!(d, ErrorKind::Digit)))
);
assert_eq!(hex_digit(a), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
hex_digit(CompleteByteSlice(a)),
Ok((CompleteByteSlice(empty), CompleteByteSlice(a)))
);
assert_eq!(hex_digit(b), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
hex_digit(CompleteByteSlice(b)),
Ok((CompleteByteSlice(empty), CompleteByteSlice(b)))
);
assert_eq!(hex_digit(c), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
hex_digit(CompleteByteSlice(c)),
Ok((CompleteByteSlice(empty), CompleteByteSlice(c)))
);
assert_eq!(hex_digit(d), Ok(("zé12".as_bytes(), &b"a"[..])));
assert_eq!(
hex_digit(e),
Err(Err::Error(error_position!(e, ErrorKind::HexDigit)))
);
assert_eq!(
oct_digit(a),
Err(Err::Error(error_position!(a, ErrorKind::OctDigit)))
);
assert_eq!(oct_digit(b), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
oct_digit(CompleteByteSlice(b)),
Ok((CompleteByteSlice(empty), CompleteByteSlice(b)))
);
assert_eq!(
oct_digit(c),
Err(Err::Error(error_position!(c, ErrorKind::OctDigit)))
);
assert_eq!(
oct_digit(d),
Err(Err::Error(error_position!(d, ErrorKind::OctDigit)))
);
assert_eq!(alphanumeric(a), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
alphanumeric(CompleteByteSlice(a)),
Ok((CompleteByteSlice(empty), CompleteByteSlice(a)))
);
//assert_eq!(fix_error!(b,(), alphanumeric), Ok((empty, b)));
assert_eq!(alphanumeric(c), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
alphanumeric(CompleteByteSlice(c)),
Ok((CompleteByteSlice(empty), CompleteByteSlice(c)))
);
assert_eq!(alphanumeric(d), Ok(("é12".as_bytes(), &b"az"[..])));
assert_eq!(space(e), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
space(CompleteByteSlice(e)),
Ok((CompleteByteSlice(empty), CompleteByteSlice(b" ")))
);
assert_eq!(space(f), Ok((&b";"[..], &b" "[..])));
assert_eq!(
space(CompleteByteSlice(f)),
Ok((CompleteByteSlice(b";"), CompleteByteSlice(b" ")))
);
}
#[cfg(feature = "alloc")]
#[test]
fn character_s() {
let empty = "";
let a = "abcd";
let b = "1234";
let c = "a123";
let d = "azé12";
let e = " ";
assert_eq!(alpha(a), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
alpha(CompleteStr(a)),
Ok((CompleteStr(empty), CompleteStr(a)))
);
assert_eq!(
alpha(b),
Err(Err::Error(error_position!(b, ErrorKind::Alpha)))
);
assert_eq!(alpha(c), Ok((&c[1..], &"a"[..])));
assert_eq!(alpha(d), Ok(("12", &"azé"[..])));
assert_eq!(
digit(a),
Err(Err::Error(error_position!(a, ErrorKind::Digit)))
);
assert_eq!(digit(b), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
digit(CompleteStr(b)),
Ok((CompleteStr(empty), CompleteStr(b)))
);
assert_eq!(
digit(c),
Err(Err::Error(error_position!(c, ErrorKind::Digit)))
);
assert_eq!(
digit(d),
Err(Err::Error(error_position!(d, ErrorKind::Digit)))
);
assert_eq!(hex_digit(a), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
hex_digit(CompleteStr(a)),
Ok((CompleteStr(empty), CompleteStr(a)))
);
assert_eq!(hex_digit(b), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
hex_digit(CompleteStr(b)),
Ok((CompleteStr(empty), CompleteStr(b)))
);
assert_eq!(hex_digit(c), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
hex_digit(CompleteStr(c)),
Ok((CompleteStr(empty), CompleteStr(c)))
);
assert_eq!(hex_digit(d), Ok(("zé12", &"a"[..])));
assert_eq!(
hex_digit(e),
Err(Err::Error(error_position!(e, ErrorKind::HexDigit)))
);
assert_eq!(
oct_digit(a),
Err(Err::Error(error_position!(a, ErrorKind::OctDigit)))
);
assert_eq!(oct_digit(b), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
oct_digit(CompleteStr(b)),
Ok((CompleteStr(empty), CompleteStr(b)))
);
assert_eq!(
oct_digit(c),
Err(Err::Error(error_position!(c, ErrorKind::OctDigit)))
);
assert_eq!(
oct_digit(d),
Err(Err::Error(error_position!(d, ErrorKind::OctDigit)))
);
assert_eq!(alphanumeric(a), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
alphanumeric(CompleteStr(a)),
Ok((CompleteStr(empty), CompleteStr(a)))
);
//assert_eq!(fix_error!(b,(), alphanumeric), Ok((empty, b)));
assert_eq!(alphanumeric(c), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
alphanumeric(CompleteStr(c)),
Ok((CompleteStr(empty), CompleteStr(c)))
);
assert_eq!(alphanumeric(d), Err(Err::Incomplete(Needed::Size(1))));
assert_eq!(
alphanumeric(CompleteStr(d)),
Ok((CompleteStr(""), CompleteStr("azé12")))
);
assert_eq!(space(e), Err(Err::Incomplete(Needed::Size(1))));
}
use traits::Offset;
#[test]
fn offset() {
let a = &b"abcd;"[..];
let b = &b"1234;"[..];
let c = &b"a123;"[..];
let d = &b" \t;"[..];
let e = &b" \t\r\n;"[..];
let f = &b"123abcDEF;"[..];
match alpha(a) {
Ok((i, _)) => {
assert_eq!(a.offset(i) + i.len(), a.len());
}
_ => panic!("wrong return type in offset test for alpha"),
}
match digit(b) {
Ok((i, _)) => {
assert_eq!(b.offset(i) + i.len(), b.len());
}
_ => panic!("wrong return type in offset test for digit"),
}
match alphanumeric(c) {
Ok((i, _)) => {
assert_eq!(c.offset(i) + i.len(), c.len());
}
_ => panic!("wrong return type in offset test for alphanumeric"),
}
match space(d) {
Ok((i, _)) => {
assert_eq!(d.offset(i) + i.len(), d.len());
}
_ => panic!("wrong return type in offset test for space"),
}
match multispace(e) {
Ok((i, _)) => {
assert_eq!(e.offset(i) + i.len(), e.len());
}
_ => panic!("wrong return type in offset test for multispace"),
}
match hex_digit(f) {
Ok((i, _)) => {
assert_eq!(f.offset(i) + i.len(), f.len());
}
_ => panic!("wrong return type in offset test for hex_digit"),
}
match oct_digit(f) {
Ok((i, _)) => {
assert_eq!(f.offset(i) + i.len(), f.len());
}
_ => panic!("wrong return type in offset test for oct_digit"),
}
}
#[test]
fn is_not_line_ending_bytes() {
let a: &[u8] = b"ab12cd\nefgh";
assert_eq!(not_line_ending(a), Ok((&b"\nefgh"[..], &b"ab12cd"[..])));
let b: &[u8] = b"ab12cd\nefgh\nijkl";
assert_eq!(
not_line_ending(b),
Ok((&b"\nefgh\nijkl"[..], &b"ab12cd"[..]))
);
let c: &[u8] = b"ab12cd\r\nefgh\nijkl";
assert_eq!(
not_line_ending(c),
Ok((&b"\r\nefgh\nijkl"[..], &b"ab12cd"[..]))
);
let d = CompleteByteSlice(b"ab12cd");
assert_eq!(not_line_ending(d), Ok((CompleteByteSlice(b""), d)));
let d: &[u8] = b"ab12cd";
assert_eq!(not_line_ending(d), Err(Err::Incomplete(Needed::Unknown)));
}
#[test]
fn is_not_line_ending_str() {
/*
let a: &str = "ab12cd\nefgh";
assert_eq!(not_line_ending(a), Ok((&"\nefgh"[..], &"ab12cd"[..])));
let b: &str = "ab12cd\nefgh\nijkl";
assert_eq!(not_line_ending(b), Ok((&"\nefgh\nijkl"[..], &"ab12cd"[..])));
let c: &str = "ab12cd\r\nefgh\nijkl";
assert_eq!(not_line_ending(c), Ok((&"\r\nefgh\nijkl"[..], &"ab12cd"[..])));
let d = "βèƒôřè\nÂßÇáƒƭèř";
assert_eq!(not_line_ending(d), Ok((&"\nÂßÇáƒƭèř"[..], &"βèƒôřè"[..])));
let e = "βèƒôřè\r\nÂßÇáƒƭèř";
assert_eq!(not_line_ending(e), Ok((&"\r\nÂßÇáƒƭèř"[..], &"βèƒôřè"[..])));
*/
let f = "βèƒôřè\rÂßÇáƒƭèř";
assert_eq!(
not_line_ending(f),
Err(Err::Error(error_position!(f, ErrorKind::Tag)))
);
let g = CompleteStr("ab12cd");
assert_eq!(not_line_ending(g), Ok((CompleteStr(""), g)));
let g2: &str = "ab12cd";
assert_eq!(not_line_ending(g2), Err(Err::Incomplete(Needed::Unknown)));
}
#[test]
#[cfg(feature = "alloc")]
fn buffer_with_size() {
use lib::std::vec::Vec;
let i: Vec<u8> = vec![7, 8];
let o: Vec<u8> = vec![4, 5, 6];
//let arr:[u8; 6usize] = [3, 4, 5, 6, 7, 8];
let arr: [u8; 6usize] = [3, 4, 5, 6, 7, 8];
let res = sized_buffer(&arr[..]);
assert_eq!(res, Ok((&i[..], &o[..])))
}
/*#[test]
fn t1() {
let v1:Vec<u8> = vec![1,2,3];
let v2:Vec<u8> = vec![4,5,6];
let d = Ok((&v1[..], &v2[..]));
let res = d.flat_map(print);
assert_eq!(res, Ok((&v2[..], ())));
}*/
#[test]
fn i8_tests() {
assert_eq!(be_i8(&[0x00]), Ok((&b""[..], 0)));
assert_eq!(be_i8(&[0x7f]), Ok((&b""[..], 127)));
assert_eq!(be_i8(&[0xff]), Ok((&b""[..], -1)));
assert_eq!(be_i8(&[0x80]), Ok((&b""[..], -128)));
}
#[test]
fn i16_tests() {
assert_eq!(be_i16(&[0x00, 0x00]), Ok((&b""[..], 0)));
assert_eq!(be_i16(&[0x7f, 0xff]), Ok((&b""[..], 32_767_i16)));
assert_eq!(be_i16(&[0xff, 0xff]), Ok((&b""[..], -1)));
assert_eq!(be_i16(&[0x80, 0x00]), Ok((&b""[..], -32_768_i16)));
}
#[test]
fn u24_tests() {
assert_eq!(be_u24(&[0x00, 0x00, 0x00]), Ok((&b""[..], 0)));
assert_eq!(be_u24(&[0x00, 0xFF, 0xFF]), Ok((&b""[..], 65_535_u32)));
assert_eq!(be_u24(&[0x12, 0x34, 0x56]), Ok((&b""[..], 1_193_046_u32)));
}
#[test]
fn i24_tests() {
assert_eq!(be_i24(&[0xFF, 0xFF, 0xFF]), Ok((&b""[..], -1_i32)));
assert_eq!(be_i24(&[0xFF, 0x00, 0x00]), Ok((&b""[..], -65_536_i32)));
assert_eq!(be_i24(&[0xED, 0xCB, 0xAA]), Ok((&b""[..], -1_193_046_i32)));
}
#[test]
fn i32_tests() {
assert_eq!(be_i32(&[0x00, 0x00, 0x00, 0x00]), Ok((&b""[..], 0)));
assert_eq!(
be_i32(&[0x7f, 0xff, 0xff, 0xff]),
Ok((&b""[..], 2_147_483_647_i32))
);
assert_eq!(be_i32(&[0xff, 0xff, 0xff, 0xff]), Ok((&b""[..], -1)));
assert_eq!(
be_i32(&[0x80, 0x00, 0x00, 0x00]),
Ok((&b""[..], -2_147_483_648_i32))
);
}
#[test]
fn i64_tests() {
assert_eq!(
be_i64(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]),
Ok((&b""[..], 0))
);
assert_eq!(
be_i64(&[0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]),
Ok((&b""[..], 9_223_372_036_854_775_807_i64))
);
assert_eq!(
be_i64(&[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]),
Ok((&b""[..], -1))
);
assert_eq!(
be_i64(&[0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]),
Ok((&b""[..], -9_223_372_036_854_775_808_i64))
);
}
#[test]
fn le_i8_tests() {
assert_eq!(le_i8(&[0x00]), Ok((&b""[..], 0)));
assert_eq!(le_i8(&[0x7f]), Ok((&b""[..], 127)));
assert_eq!(le_i8(&[0xff]), Ok((&b""[..], -1)));
assert_eq!(le_i8(&[0x80]), Ok((&b""[..], -128)));
}
#[test]
fn le_i16_tests() {
assert_eq!(le_i16(&[0x00, 0x00]), Ok((&b""[..], 0)));
assert_eq!(le_i16(&[0xff, 0x7f]), Ok((&b""[..], 32_767_i16)));
assert_eq!(le_i16(&[0xff, 0xff]), Ok((&b""[..], -1)));
assert_eq!(le_i16(&[0x00, 0x80]), Ok((&b""[..], -32_768_i16)));
}
#[test]
fn le_u24_tests() {
assert_eq!(le_u24(&[0x00, 0x00, 0x00]), Ok((&b""[..], 0)));
assert_eq!(le_u24(&[0xFF, 0xFF, 0x00]), Ok((&b""[..], 65_535_u32)));
assert_eq!(le_u24(&[0x56, 0x34, 0x12]), Ok((&b""[..], 1_193_046_u32)));
}
#[test]
fn le_i24_tests() {
assert_eq!(le_i24(&[0xFF, 0xFF, 0xFF]), Ok((&b""[..], -1_i32)));
assert_eq!(le_i24(&[0x00, 0x00, 0xFF]), Ok((&b""[..], -65_536_i32)));
assert_eq!(le_i24(&[0xAA, 0xCB, 0xED]), Ok((&b""[..], -1_193_046_i32)));
}
#[test]
fn le_i32_tests() {
assert_eq!(le_i32(&[0x00, 0x00, 0x00, 0x00]), Ok((&b""[..], 0)));
assert_eq!(
le_i32(&[0xff, 0xff, 0xff, 0x7f]),
Ok((&b""[..], 2_147_483_647_i32))
);
assert_eq!(le_i32(&[0xff, 0xff, 0xff, 0xff]), Ok((&b""[..], -1)));
assert_eq!(
le_i32(&[0x00, 0x00, 0x00, 0x80]),
Ok((&b""[..], -2_147_483_648_i32))
);
}
#[test]
fn le_i64_tests() {
assert_eq!(
le_i64(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]),
Ok((&b""[..], 0))
);
assert_eq!(
le_i64(&[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f]),
Ok((&b""[..], 9_223_372_036_854_775_807_i64))
);
assert_eq!(
le_i64(&[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]),
Ok((&b""[..], -1))
);
assert_eq!(
le_i64(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80]),
Ok((&b""[..], -9_223_372_036_854_775_808_i64))
);
}
#[test]
fn be_f32_tests() {
assert_eq!(be_f32(&[0x00, 0x00, 0x00, 0x00]), Ok((&b""[..], 0_f32)));
assert_eq!(
be_f32(&[0x4d, 0x31, 0x1f, 0xd8]),
Ok((&b""[..], 185_728_392_f32))
);
}
#[test]
fn be_f64_tests() {
assert_eq!(
be_f64(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]),
Ok((&b""[..], 0_f64))
);
assert_eq!(
be_f64(&[0x41, 0xa6, 0x23, 0xfb, 0x10, 0x00, 0x00, 0x00]),
Ok((&b""[..], 185_728_392_f64))
);
}
#[test]
fn le_f32_tests() {
assert_eq!(le_f32(&[0x00, 0x00, 0x00, 0x00]), Ok((&b""[..], 0_f32)));
assert_eq!(
le_f32(&[0xd8, 0x1f, 0x31, 0x4d]),
Ok((&b""[..], 185_728_392_f32))
);
}
#[test]
fn le_f64_tests() {
assert_eq!(
le_f64(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]),
Ok((&b""[..], 0_f64))
);
assert_eq!(
le_f64(&[0x00, 0x00, 0x00, 0x10, 0xfb, 0x23, 0xa6, 0x41]),
Ok((&b""[..], 185_728_392_f64))
);
}
#[test]
fn hex_u32_tests() {
    // A non-hex leading byte fails with the `is_a!` error kind.
    assert_eq!(
        hex_u32(&b";"[..]),
        Err(Err::Error(error_position!(&b";"[..], ErrorKind::IsA)))
    );
    assert_eq!(hex_u32(&b"ff;"[..]), Ok((&b";"[..], 255)));
    assert_eq!(hex_u32(&b"1be2;"[..]), Ok((&b";"[..], 7138)));
    assert_eq!(hex_u32(&b"c5a31be2;"[..]), Ok((&b";"[..], 3_315_801_058)));
    // Uppercase hex digits are accepted too.
    assert_eq!(hex_u32(&b"C5A31be2;"[..]), Ok((&b";"[..], 3_315_801_058)));
    // More than 8 hex digits: only the first 8 are consumed.
    assert_eq!(hex_u32(&b"00c5a31be2;"[..]), Ok((&b"e2;"[..], 12_952_347)));
    assert_eq!(
        hex_u32(&b"c5a31be201;"[..]),
        Ok((&b"01;"[..], 3_315_801_058))
    );
    assert_eq!(hex_u32(&b"ffffffff;"[..]), Ok((&b";"[..], 4_294_967_295)));
    // No "0x" prefix support: parsing stops after the leading '0'.
    assert_eq!(hex_u32(&b"0x1be2;"[..]), Ok((&b"x1be2;"[..], 0)));
}
/*
#[test]
fn end_of_input() {
let not_over = &b"Hello, world!"[..];
let is_over = &b""[..];
named!(eof_test, eof!());
let res_not_over = eof_test(not_over);
assert_eq!(res_not_over, Err(Err::Error(error_position!(not_over, ErrorKind::Eof))));
let res_over = eof_test(is_over);
assert_eq!(res_over, Ok((is_over, is_over)));
}
*/
#[test]
fn rest_on_slices() {
    // `rest` consumes the whole byte slice and leaves an empty remainder.
    let input: &[u8] = &b"Hello, world!"[..];
    let empty: &[u8] = &b""[..];
    assert_eq!(rest(input), Ok((empty, input)));
}
#[test]
fn rest_on_strs() {
    // `rest` works for &str inputs too, via the generic Slice bounds.
    let input: &str = "Hello, world!";
    let empty: &str = "";
    assert_eq!(rest(input), Ok((empty, input)));
}
#[test]
fn configurable_endianness() {
named!(be_tst16<u16>, u16!(Endianness::Big));
named!(le_tst16<u16>, u16!(Endianness::Little));
assert_eq!(be_tst16(&[0x80, 0x00]), Ok((&b""[..], 32_768_u16)));
assert_eq!(le_tst16(&[0x80, 0x00]), Ok((&b""[..], 128_u16)));
named!(be_tst32<u32>, u32!(Endianness::Big));
named!(le_tst32<u32>, u32!(Endianness::Little));
assert_eq!(
be_tst32(&[0x12, 0x00, 0x60, 0x00]),
Ok((&b""[..], 302_014_464_u32))
);
assert_eq!(
le_tst32(&[0x12, 0x00, 0x60, 0x00]),
Ok((&b""[..], 6_291_474_u32))
);
named!(be_tst64<u64>, u64!(Endianness::Big));
named!(le_tst64<u64>, u64!(Endianness::Little));
assert_eq!(
be_tst64(&[0x12, 0x00, 0x60, 0x00, 0x12, 0x00, 0x80, 0x00]),
Ok((&b""[..], 1_297_142_246_100_992_000_u64))
);
assert_eq!(
le_tst64(&[0x12, 0x00, 0x60, 0x00, 0x12, 0x00, 0x80, 0x00]),
Ok((&b""[..], 36_028_874_334_666_770_u64))
);
named!(be_tsti16<i16>, i16!(Endianness::Big));
named!(le_tsti16<i16>, i16!(Endianness::Little));
assert_eq!(be_tsti16(&[0x00, 0x80]), Ok((&b""[..], 128_i16)));
assert_eq!(le_tsti16(&[0x00, 0x80]), Ok((&b""[..], -32_768_i16)));
named!(be_tsti32<i32>, i32!(Endianness::Big));
named!(le_tsti32<i32>, i32!(Endianness::Little));
assert_eq!(
be_tsti32(&[0x00, 0x12, 0x60, 0x00]),
Ok((&b""[..], 1_204_224_i32))
);
assert_eq!(
le_tsti32(&[0x00, 0x12, 0x60, 0x00]),
Ok((&b""[..], 6_296_064_i32))
);
named!(be_tsti64<i64>, i64!(Endianness::Big));
named!(le_tsti64<i64>, i64!(Endianness::Little));
assert_eq!(
be_tsti64(&[0x00, 0xFF, 0x60, 0x00, 0x12, 0x00, 0x80, 0x00]),
Ok((&b""[..], 71_881_672_479_506_432_i64))
);
assert_eq!(
le_tsti64(&[0x00, 0xFF, 0x60, 0x00, 0x12, 0x00, 0x80, 0x00]),
Ok((&b""[..], 36_028_874_334_732_032_i64))
);
}
#[test]
#[cfg(feature = "std")]
fn manual_configurable_endianness_test() {
let x = 1;
let int_parse: Box<Fn(&[u8]) -> IResult<&[u8], u16>> = if x == 2 {
Box::new(be_u16)
} else {
Box::new(le_u16)
};
println!("{:?}", int_parse(&b"3"[..]));
assert_eq!(int_parse(&[0x80, 0x00]), Ok((&b""[..], 128_u16)));
}
use lib::std::convert::From;
impl From<u32> for CustomError {
fn from(_: u32) -> Self {
CustomError
}
}
struct CustomError;
#[allow(dead_code)]
fn custom_error(input: &[u8]) -> IResult<&[u8], &[u8], CustomError> {
fix_error!(input, CustomError, alphanumeric)
}
#[test]
fn hex_digit_test() {
let i = &b"0123456789abcdefABCDEF;"[..];
assert_eq!(hex_digit(i), Ok((&b";"[..], &i[..i.len() - 1])));
let i = &b"g"[..];
assert_eq!(
hex_digit(i),
Err(Err::Error(error_position!(i, ErrorKind::HexDigit)))
);
let i = &b"G"[..];
assert_eq!(
hex_digit(i),
Err(Err::Error(error_position!(i, ErrorKind::HexDigit)))
);
assert!(is_hex_digit(b'0'));
assert!(is_hex_digit(b'9'));
assert!(is_hex_digit(b'a'));
assert!(is_hex_digit(b'f'));
assert!(is_hex_digit(b'A'));
assert!(is_hex_digit(b'F'));
assert!(!is_hex_digit(b'g'));
assert!(!is_hex_digit(b'G'));
assert!(!is_hex_digit(b'/'));
assert!(!is_hex_digit(b':'));
assert!(!is_hex_digit(b'@'));
assert!(!is_hex_digit(b'\x60'));
}
#[test]
fn oct_digit_test() {
let i = &b"01234567;"[..];
assert_eq!(oct_digit(i), Ok((&b";"[..], &i[..i.len() - 1])));
let i = &b"8"[..];
assert_eq!(
oct_digit(i),
Err(Err::Error(error_position!(i, ErrorKind::OctDigit)))
);
assert!(is_oct_digit(b'0'));
assert!(is_oct_digit(b'7'));
assert!(!is_oct_digit(b'8'));
assert!(!is_oct_digit(b'9'));
assert!(!is_oct_digit(b'a'));
assert!(!is_oct_digit(b'A'));
assert!(!is_oct_digit(b'/'));
assert!(!is_oct_digit(b':'));
assert!(!is_oct_digit(b'@'));
assert!(!is_oct_digit(b'\x60'));
}
#[test]
fn full_line_windows() {
named!(
take_full_line<(&[u8], &[u8])>,
tuple!(not_line_ending, line_ending)
);
let input = b"abc\r\n";
let output = take_full_line(input);
assert_eq!(output, Ok((&b""[..], (&b"abc"[..], &b"\r\n"[..]))));
}
#[test]
fn full_line_unix() {
named!(
take_full_line<(&[u8], &[u8])>,
tuple!(not_line_ending, line_ending)
);
let input = b"abc\n";
let output = take_full_line(input);
assert_eq!(output, Ok((&b""[..], (&b"abc"[..], &b"\n"[..]))));
}
#[test]
fn check_windows_lineending() {
let input = b"\r\n";
let output = line_ending(&input[..]);
assert_eq!(output, Ok((&b""[..], &b"\r\n"[..])));
}
#[test]
fn check_unix_lineending() {
let input = b"\n";
let output = line_ending(&input[..]);
assert_eq!(output, Ok((&b""[..], &b"\n"[..])));
}
#[test]
fn cr_lf() {
assert_eq!(crlf(&b"\r\na"[..]), Ok((&b"a"[..], &b"\r\n"[..])));
assert_eq!(crlf(&b"\r"[..]), Err(Err::Incomplete(Needed::Size(2))));
assert_eq!(
crlf(&b"\ra"[..]),
Err(Err::Error(error_position!(&b"\ra"[..], ErrorKind::CrLf)))
);
assert_eq!(crlf("\r\na"), Ok(("a", "\r\n")));
assert_eq!(crlf("\r"), Err(Err::Incomplete(Needed::Size(2))));
assert_eq!(
crlf("\ra"),
Err(Err::Error(error_position!("\ra", ErrorKind::CrLf)))
);
}
#[test]
fn end_of_line() {
assert_eq!(eol(&b"\na"[..]), Ok((&b"a"[..], &b"\n"[..])));
assert_eq!(eol(&b"\r\na"[..]), Ok((&b"a"[..], &b"\r\n"[..])));
assert_eq!(eol(&b"\r"[..]), Err(Err::Incomplete(Needed::Size(2))));
assert_eq!(
eol(&b"\ra"[..]),
Err(Err::Error(error_position!(&b"\ra"[..], ErrorKind::CrLf)))
);
assert_eq!(eol("\na"), Ok(("a", "\n")));
assert_eq!(eol("\r\na"), Ok(("a", "\r\n")));
assert_eq!(eol("\r"), Err(Err::Incomplete(Needed::Size(2))));
assert_eq!(
eol("\ra"),
Err(Err::Error(error_position!("\ra", ErrorKind::CrLf)))
);
}
#[test]
#[cfg(feature = "std")]
fn float_test() {
let mut test_cases = vec![
"+3.14",
"3.14",
"-3.14",
"0",
"0.0",
"1.",
".789",
"-.5",
"1e7",
"-1E-7",
".3e-2",
"1.e4",
"1.2e4",
"-1.234E-12",
"-1.234e-12",
];
for test in test_cases.drain(..) {
let expected32 = str::parse::<f32>(test).unwrap();
let expected64 = str::parse::<f64>(test).unwrap();
println!("now parsing: {} -> {}", test, expected32);
assert_eq!(
recognize_float(CompleteStr(test)),
Ok((CompleteStr(""), CompleteStr(test)))
);
let larger = format!("{};", test);
assert_eq!(recognize_float(&larger[..]), Ok((";", test)));
assert_eq!(float(larger.as_bytes()), Ok((&b";"[..], expected32)));
assert_eq!(float_s(&larger[..]), Ok((";", expected32)));
assert_eq!(double(larger.as_bytes()), Ok((&b";"[..], expected64)));
assert_eq!(double_s(&larger[..]), Ok((";", expected64)));
}
let remaining_exponent = "-1.234E-";
assert_eq!(
recognize_float(remaining_exponent),
Err(Err::Incomplete(Needed::Size(1)))
);
}
#[allow(dead_code)]
pub fn end_of_line_completestr(input: CompleteStr) -> IResult<CompleteStr, CompleteStr> {
alt!(input, eof!() | eol)
}
}
|
//! Useful parser combinators
//!
//! A number of useful parser combinators have already been implemented.
//! Some of them use macros, other are implemented through functions.
//! Hopefully, the syntax will converge to onely one way in the future,
//! but the macros system makes no promises.
//!
extern crate collections;
use std::fmt::Debug;
use internal::*;
use internal::IResult::*;
/// declares a byte array as a suite to recognize
///
/// consumes the recognized characters
///
/// ```ignore
/// tag!(x "abcd".as_bytes());
/// let r = Done((), "abcdabcdefgh".as_bytes()).flat_map(x);
/// assert_eq!(r, Done("efgh".as_bytes(), "abcd".as_bytes()));
/// ```
#[macro_export]
macro_rules! tag(
  ($name:ident $inp:expr) => (
    // Generated parser: matches the literal byte sequence `$inp` at the
    // start of the input.
    fn $name(i:&[u8]) -> IResult<&[u8], &[u8]>{
      if i.len() >= $inp.len() && i.slice(0, $inp.len()) == $inp {
        // Matched: (remaining input, consumed bytes).
        Done(&i[$inp.len()..], &i[0..$inp.len()])
      } else {
        // NOTE(review): no Incomplete case — an input that is a proper
        // prefix of `$inp` is reported as Error(0), not Incomplete.
        Error(0)
      }
    }
  )
);
/// Returns a boxed closure that recognizes the byte sequence `rec` at the
/// start of its input — the closure form of the `tag!` macro above.
pub fn tag_cl<'a,'b>(rec:&'a[u8]) -> Box<Fn(&'b[u8]) -> IResult<&'b[u8], &'b[u8]>> {
  Box::new(move |i: &'b[u8]| -> IResult<&'b[u8], &'b[u8]> {
    if i.len() >= rec.len() && i.slice(0, rec.len()) == rec {
      // Matched: (remaining input, consumed prefix).
      Done(&i[rec.len()..], &i[0..rec.len()])
    } else {
      // Like `tag!`: partial matches are plain errors, never Incomplete.
      Error(0)
    }
  })
}
/// chains parsers and returns the result of only one of them
///
/// ```ignore
/// tag!(x "abcd".as_bytes());
/// tag!(y "efgh".as_bytes());
///
/// fn ret_int(i:&[u8]) -> IResult<&[u8], u8> { Done(i,1) };
///
/// // parse the x tag two times, return an int, parse the y tag
/// o!(z<&[u8], u8> x ~ x ~ [ ret_int ] ~ y);
///
/// let r = Done((), "abcdabcdefgh".as_bytes()).flat_map(z);
/// assert_eq!(r, Done("".as_bytes(), 1));
/// ```
#[macro_export]
macro_rules! o(
  ($name:ident<$i:ty,$o:ty> $f1:ident ~ $($rest:tt)*) => (
    #[allow(unused_variables)]
    // Generated parser: runs `$f1` first, discards its output, then threads
    // the remaining input through `o_parser!` for the rest of the chain.
    fn $name(input:$i) -> IResult<$i, $o>{
      match $f1(input) {
        // Error and Incomplete short-circuit the whole chain.
        IResult::Error(e) => IResult::Error(e),
        IResult::Incomplete(i) => IResult::Incomplete(i),//IResult::Incomplete(i),
        IResult::Done(i,o) => {
          o_parser!(i ~ o ~ $($rest)*)
        }
      }
    }
  );
);
#[macro_export]
macro_rules! o_parser(
($i:ident ~ $o:ident ~ [ $e:ident ] ~ $s:ident) => (
match $e($i) {
IResult::Error(e) => IResult::Error(e),
IResult::Incomplete(i) => IResult::Incomplete(i),//IResult::Incomplete(i),
IResult::Done(i,o) => {
match $s(i) {
IResult::Error(e) => IResult::Error(e),
IResult::Incomplete(i2) => IResult::Incomplete(i2),//IResult::Incomplete(i),
IResult::Done(i2,o2) => {
IResult::Done(i2, o)
}
}
}
}
);
($i:ident ~ $o:ident ~ [ $e:ident ] ~ $($rest:tt)*) => (
match $e($i) {
IResult::Error(e) => IResult::Error(e),
IResult::Incomplete(i) => IResult::Incomplete(i),//IResult::Incomplete(i),
IResult::Done(i,o) => {
o_parser!(i ~ o ~ $($rest)*)
}
}
);
($i:ident ~ $o:ident ~ [ $e:ident ]) => (
$e($i)
);
($i:ident ~ $o:ident ~ $e:ident ~ $($rest:tt)*) => (
match $e($i) {
IResult::Error(e) => IResult::Error(e),
IResult::Incomplete(i) => IResult::Incomplete(i),//IResult::Incomplete(i),
IResult::Done(i,_) => {
o_parser!(i ~ $o ~ $($rest)*)
}
}
);
($i:ident ~ $o:ident ~ $e:ident) => (
match $e($i) {
IResult::Error(e) => IResult::Error(e),
IResult::Incomplete(i) => IResult::Incomplete(i),//IResult::Incomplete(i),
IResult::Done(i,_) => {
IResult::Done(i, $o)
}
}
);
($i:ident ~ $o:ident) => (Done($i,$o));
);
#[macro_export]
macro_rules! chain (
  ($name:ident<$i:ty,$o:ty>, $assemble:expr, $($rest:tt)*) => (
    #[allow(unused_variables)]
    // Generated parser: runs the chained sub-parsers via `chaining_parser!`
    // and builds the final value with the `$assemble` closure.
    fn $name(i:$i) -> IResult<$i,$o>{
      chaining_parser!(i, $assemble, $($rest)*)
    }
  );
);
#[macro_export]
macro_rules! chaining_parser (
($i:expr, $assemble:expr, $e:ident ~ $($rest:tt)*) => (
match $e($i) {
IResult::Error(e) => IResult::Error(e),
IResult::Incomplete(i) => IResult::Incomplete(i),
IResult::Done(i,_) => {
chaining_parser!(i, $assemble, $($rest)*)
}
}
);
($i:expr, $assemble:expr, $field:ident : $e:ident ~ $($rest:tt)*) => (
match $e($i) {
IResult::Error(e) => IResult::Error(e),
IResult::Incomplete(i) => IResult::Incomplete(i),
IResult::Done(i,o) => {
let $field = o;
chaining_parser!(i, $assemble, $($rest)*)
}
}
);
($i:expr, $assemble:expr, $field:ident : $e:ident) => (
match $e($i) {
IResult::Error(e) => IResult::Error(e),
IResult::Incomplete(i) => IResult::Incomplete(i),
IResult::Done(i,o) => {
let $field = o;
IResult::Done(i, $assemble())
}
}
);
($i:expr, $assemble:expr, $e: ident) => (
match $e($i) {
IResult::Error(e) => IResult::Error(e),
IResult::Incomplete(i) => IResult::Incomplete(i),
IResult::Done(i,_) => {
IResult::Done(i, $assemble())
}
}
);
($i:expr, $assemble:expr,) => (
IResult::Done($i, $assemble())
)
);
#[macro_export]
macro_rules! alt (
  ($name:ident<$i:ty,$o:ty>, $($rest:tt)*) => (
    // Generated parser: tries each `|`-separated alternative in order via
    // `alt_parser!` and returns the first success.
    fn $name(i:$i) -> IResult<$i,$o>{
      alt_parser!(i | $($rest)*)
    }
  );
);
#[macro_export]
macro_rules! alt_parser (
($i:ident | $e:ident | $($rest:tt)*) => (
match $e($i) {
IResult::Error(_) => alt_parser!($i | $($rest)*),
IResult::Incomplete(_) => alt_parser!($i | $($rest)*),
IResult::Done(i,o) => IResult::Done(i,o)
}
);
($i:ident | $e:ident) => (
match $e($i) {
IResult::Error(_) => alt_parser!($i),
IResult::Incomplete(_) => alt_parser!($i),
IResult::Done(i,o) => IResult::Done(i,o)
}
);
($i:ident) => (
IResult::Error(1)
)
);
/// Debug helper: prints the input with `{:?}` and succeeds with `()` as the
/// produced value, passing the input through as the "remaining" side.
pub fn print<'y,T: Debug>(input: T) -> IResult<'y,T, ()> {
  println!("{:?}", input);
  Done(input, ())
}
/// Wraps a byte slice into a `Done((), input)` result — used to seed a
/// `flat_map`-style chain (see the `tag!` doc example in this module).
pub fn begin<'a,'y>(input: &'a [u8]) -> IResult<'y,(), &'a [u8]> {
  Done((), input)
}
#[macro_export]
macro_rules! is_not(
  ($name:ident $arr:expr) => (
    // Generated parser: consumes input up to (but not including) the first
    // byte that appears in `$arr`.
    fn $name(input:&[u8]) -> IResult<&[u8], &[u8]> {
      for idx in range(0, input.len()) {
        for &i in $arr.iter() {
          if input[idx] == i {
            return IResult::Done(input.slice_from(idx), input.slice(0, idx))
          }
        }
      }
      // No forbidden byte found: the whole input matches.
      IResult::Done("".as_bytes(), input)
    }
  )
);
#[macro_export]
macro_rules! is_a(
  ($name:ident $arr:expr) => (
    // Generated parser: consumes the longest prefix made only of bytes
    // that appear in `$arr`.
    fn $name(input:&[u8]) -> IResult<&[u8], &[u8]> {
      for idx in range(0, input.len()) {
        // Fixed: `var res = false` is not Rust syntax (and lacked a
        // semicolon); the macro could never expand to compiling code.
        let mut res = false;
        for &i in $arr.iter() {
          if input[idx] == i {
            res = true;
          }
        }
        if !res {
          // First byte outside the set: stop here.
          return IResult::Done(input.slice_from(idx), input.slice(0, idx))
        }
      }
      // Every byte was in the set: consume the whole input.
      IResult::Done("".as_bytes(), input)
    }
  )
);
#[macro_export]
macro_rules! filter(
  ($name:ident $f:ident) => (
    // Generated parser: consumes the longest prefix whose bytes all satisfy
    // the predicate `$f`.
    fn $name(input:&[u8]) -> IResult<&[u8], &[u8]> {
      for idx in 0..input.len() {
        if !$f(input[idx]) {
          return IResult::Done(&input[idx..], &input[0..idx])
        }
      }
      // Every byte satisfied the predicate.
      IResult::Done("".as_bytes(), input)
    }
  )
);
// FIXME: when rust-lang/rust#17436 is fixed, macros will be able to export
// public methods
//pub is_not!(line_ending "\r\n".as_bytes())
/// Consumes input up to (but not including) the first `\r` or `\n`;
/// consumes everything when no line-ending byte is present.
pub fn not_line_ending(input:&[u8]) -> IResult<&[u8], &[u8]> {
  match input.iter().position(|&c| c == b'\r' || c == b'\n') {
    Some(idx) => Done(&input[idx..], &input[0..idx]),
    None      => Done("".as_bytes(), input),
  }
}
// NOTE(review): only "\n" is recognized here; "\r\n" (CRLF) is not accepted.
tag!(tag_ln "\n".as_bytes());
/// Recognizes a line ending (currently only `\n`, via `tag_ln`).
pub fn line_ending(input:&[u8]) -> IResult<&[u8], &[u8]> {
  tag_ln(input)
}
/// Tests whether `chr` is an ASCII letter (`A-Z` or `a-z`).
pub fn is_alphabetic(chr:u8) -> bool {
  (chr >= b'A' && chr <= b'Z') || (chr >= b'a' && chr <= b'z')
}
/// Tests whether `chr` is an ASCII decimal digit (`0-9`).
pub fn is_digit(chr: u8) -> bool {
  chr >= b'0' && chr <= b'9'
}
/// Tests whether `chr` is an ASCII letter or decimal digit.
pub fn is_alphanumeric(chr: u8) -> bool {
  is_digit(chr) || is_alphabetic(chr)
}
/// Tests whether `chr` is a space or a horizontal tab.
pub fn is_space(chr:u8) -> bool {
  chr == b' ' || chr == b'\t'
}
// FIXME: when rust-lang/rust#17436 is fixed, macros will be able to export
//pub filter!(alpha is_alphabetic)
//pub filter!(digit is_digit)
//pub filter!(alphanumeric is_alphanumeric)
/// Consumes the longest leading run of ASCII letters.
pub fn alpha(input:&[u8]) -> IResult<&[u8], &[u8]> {
  match input.iter().position(|&c| !is_alphabetic(c)) {
    Some(idx) => Done(&input[idx..], &input[0..idx]),
    None      => Done("".as_bytes(), input),
  }
}
/// Consumes the longest leading run of ASCII decimal digits.
pub fn digit(input:&[u8]) -> IResult<&[u8], &[u8]> {
  match input.iter().position(|&c| !is_digit(c)) {
    Some(idx) => Done(&input[idx..], &input[0..idx]),
    None      => Done("".as_bytes(), input),
  }
}
/// Consumes the longest leading run of ASCII letters and digits.
pub fn alphanumeric(input:&[u8]) -> IResult<&[u8], &[u8]> {
  match input.iter().position(|&c| !is_alphanumeric(c)) {
    Some(idx) => Done(&input[idx..], &input[0..idx]),
    None      => Done("".as_bytes(), input),
  }
}
/// Consumes the longest leading run of spaces and tabs.
pub fn space(input:&[u8]) -> IResult<&[u8], &[u8]> {
  match input.iter().position(|&c| !is_space(c)) {
    Some(idx) => Done(&input[idx..], &input[0..idx]),
    None      => Done("".as_bytes(), input),
  }
}
/// Consumes the longest leading run of whitespace: space, tab, CR, or LF.
pub fn multispace(input:&[u8]) -> IResult<&[u8], &[u8]> {
  let end = input
    .iter()
    .position(|&c| !(is_space(c) || c == b'\r' || c == b'\n'));
  match end {
    Some(idx) => Done(&input[idx..], &input[0..idx]),
    None      => Done("".as_bytes(), input),
  }
}
/// Parses a length-prefixed buffer: the first byte is the payload length,
/// and the following `len` bytes are returned as the value. Reports
/// Incomplete when the input is too short.
pub fn sized_buffer(input:&[u8]) -> IResult<&[u8], &[u8]> {
  match input.first() {
    None => Incomplete(0),
    Some(&first) => {
      let len = first as usize;
      if input.len() > len {
        // Skip the length byte, return the payload, keep the tail.
        Done(&input[len+1..], &input[1..len+1])
      } else {
        Incomplete(0)
      }
    }
  }
}
#[macro_export]
macro_rules! opt(
($name:ident<$i:ty,$o:ty> $f:ident) => (
fn $name(input:$i) -> IResult<$i, Option<$o>> {
match $f(input) {
IResult::Done(i,o) => IResult::Done(i, Some(o)),
_ => IResult::Done(input, None)
}
}
)
);
// 0 or more
#[macro_export]
macro_rules! many0(
($name:ident<$i:ty,$o:ty> $f:ident) => (
fn $name(input:$i) -> IResult<$i,Vec<$o>> {
let mut begin = 0;
let mut remaining = input.len();
let mut res: Vec<$o> = Vec::new();
loop {
match $f(input.slice_from(begin)) {
IResult::Done(i,o) => {
res.push(o);
begin += remaining - i.len();
remaining = i.len();
if begin >= input.len() {
return IResult::Done(i, res)
}
},
_ => {
return IResult::Done(input.slice_from(begin), res)
}
}
}
}
)
);
// one or more
#[macro_export]
macro_rules! many1(
($name:ident<$i:ty,$o:ty> $f:ident) => (
fn $name(input:$i) -> IResult<$i,Vec<$o>> {
let mut begin = 0;
let mut remaining = input.len();
let mut res: Vec<$o> = Vec::new();
loop {
match $f(input.slice_from(begin)) {
IResult::Done(i,o) => {
res.push(o);
begin += remaining - i.len();
remaining = i.len();
if begin >= input.len() {
return IResult::Done(i, res)
}
},
_ => {
if begin == 0 {
return IResult::Error(0)
} else {
return IResult::Done(input.slice_from(begin), res)
}
}
}
}
}
)
);
#[macro_export]
macro_rules! fold0(
($name:ident<$i:ty,$o:ty>, $assemble:expr, $f:ident) => (
fn $name(input:$i, z:$o) -> IResult<$i,$o> {
fold0_impl!(<$i, $o>, $assemble, $f, input, z);
}
);
);
#[macro_export]
macro_rules! fold0_impl(
(<$i:ty,$o:ty>, $assemble:expr, $f:ident, $input:ident, $z:ident) => (
{
let mut begin = 0;
let mut remaining = $input.len();
let mut res: $o = $z;
loop {
match $f($input.slice_from(begin)) {
IResult::Done(i,o) => {
//res.push(o);
res = $assemble(res, o);
begin += remaining - i.len();
remaining = i.len();
if begin >= $input.len() {
return IResult::Done(i, res)
}
},
_ => {
return IResult::Done($input.slice_from(begin), res)
}
}
}
}
);
);
/// `fold1!(name<I,O>, assemble, f)` is like `fold0!` but requires `f` to
/// match at least once; otherwise the generated parser returns `Error(0)`.
#[macro_export]
macro_rules! fold1(
  ($name:ident<$i:ty,$o:ty>, $assemble:expr, $f:ident) => (
    fn $name(input:$i, z:$o) -> IResult<$i,$o> {
      fold1_impl!(<$i, $o>, $assemble, $f, input, z);
    }
  );
);
/// Shared body for `fold1!`: same loop as `fold0_impl!`, but a failure on the
/// very first iteration (nothing consumed yet) is an error.
#[macro_export]
macro_rules! fold1_impl(
  (<$i:ty,$o:ty>, $assemble:expr, $f:ident, $input:ident, $z:ident) => (
    {
      let mut begin = 0;
      let mut remaining = $input.len();
      let mut res: $o = $z;
      loop {
        match $f($input.slice_from(begin)) {
          IResult::Done(i,o) => {
            res = $assemble(res, o);
            // advance by the number of bytes `$f` consumed
            begin += remaining - i.len();
            remaining = i.len();
            if begin >= $input.len() {
              return IResult::Done(i, res)
            }
          },
          _ => {
            if begin == 0 {
              // `$f` never matched: one-or-more fold fails
              return IResult::Error(0)
            } else {
              return IResult::Done($input.slice_from(begin), res)
            }
          }
        }
      }
    }
  );
);
/// Reads a one-byte length prefix, then returns that many following bytes as
/// the output; the remaining input is whatever comes after the value.
pub fn length_value(input:&[u8]) -> IResult<&[u8], &[u8]> {
  let input_len = input.len();
  if input_len == 0 {
    return IResult::Error(0)
  }
  // first byte is the payload length
  let len = input[0] as usize;
  if input_len - 1 >= len {
    // the guard above makes both slices in-bounds
    return IResult::Done(&input[len+1..], &input[1..len+1])
  } else {
    // FIXME: return Incomplete
    return IResult::Error(0)
  }
}
/// `take!(name count)` generates a parser consuming exactly `count` bytes,
/// returning them as output, or `Incomplete(0)` when fewer are available.
#[macro_export]
macro_rules! take(
  ($name:ident $count:expr) => (
    fn $name(i:&[u8]) -> IResult<&[u8], &[u8]>{
      if i.len() < $count {
        Incomplete(0)
      } else {
        Done(&i[$count..],&i[0..$count])
      }
    }
  )
);
/// `take_until!(name inp)` generates a parser that consumes everything up to
/// and including the first occurrence of `inp`, returning the bytes before it.
/// Returns `Incomplete(0)` once `inp` can no longer fit in the remaining
/// input, and `Error(0)` on empty input.
///
/// Fixes: removed leftover debug `println!`s (a library macro must not write
/// to stdout) and the unreachable `idx + $inp.len() > i.len()` branch inside
/// the match (the loop-top guard already excludes that case).
#[macro_export]
macro_rules! take_until(
  ($name:ident $inp:expr) => (
    fn $name(i:&[u8]) -> IResult<&[u8], &[u8]>{
      for idx in 0..i.len() {
        // not enough bytes left for a full match: need more input
        if idx + $inp.len() > i.len() {
          return Incomplete(0)
        }
        if &i[idx..idx + $inp.len()] == $inp {
          // skip the delimiter; the guard above keeps this slice in bounds
          return Done(&i[(idx + $inp.len())..], &i[0..idx])
        }
      }
      return Error(0)
    }
  )
);
/// Like `take_until!`, but leaves the matched `inp` in the remaining input
/// instead of consuming it.
///
/// Fixes: removed leftover debug `println!`s from the generated parser.
#[macro_export]
macro_rules! take_until_and_leave(
  ($name:ident $inp:expr) => (
    fn $name(i:&[u8]) -> IResult<&[u8], &[u8]>{
      for idx in 0..i.len() {
        if idx + $inp.len() > i.len() {
          return Incomplete(0)
        }
        if &i[idx..idx + $inp.len()] == $inp {
          // keep the delimiter in the unconsumed part
          return Done(&i[idx..], &i[0..idx])
        }
      }
      return Error(0)
    }
  )
);
/// Consumes input up to and including the first byte that appears in `inp`
/// (a set of alternative single bytes), returning the bytes before it.
///
/// Fixes: removed debug `println!`s and two unreachable `idx + 1 > i.len()`
/// checks — `idx < i.len()` inside the loop, so `idx + 1 <= i.len()` always.
#[macro_export]
macro_rules! take_until_either(
  ($name:ident $inp:expr) => (
    fn $name(i:&[u8]) -> IResult<&[u8], &[u8]>{
      for idx in 0..i.len() {
        for &t in $inp.iter() {
          if i[idx] == t {
            // skip the matched byte; idx+1 is a valid slice bound here
            return Done(&i[(idx+1)..], &i[0..idx])
          }
        }
      }
      return Error(0)
    }
  )
);
/// Like `take_until_either!`, but leaves the matched byte in the remaining
/// input instead of consuming it.
///
/// Fixes: removed debug `println!`s and the unreachable `idx + 1 > i.len()`
/// guard (always false while `idx < i.len()`).
#[macro_export]
macro_rules! take_until_either_and_leave(
  ($name:ident $inp:expr) => (
    fn $name(i:&[u8]) -> IResult<&[u8], &[u8]>{
      for idx in 0..i.len() {
        for &t in $inp.iter() {
          if i[idx] == t {
            return Done(&i[idx..], &i[0..idx])
          }
        }
      }
      return Error(0)
    }
  )
);
/// Unit tests for the combinator macros and functions above.
///
/// Fixes: `[u8; 6us]` used the `us` integer suffix and `&arr[]` used the
/// empty-range slicing form — both removed before Rust 1.0; replaced with
/// plain lengths and `[..]`. Also dropped stray `;` after item fns and
/// debug printlns.
#[cfg(test)]
mod tests {
  use super::*;
  use map::*;
  use internal::IResult;
  use internal::IResult::*;

  #[test]
  fn tag_closure() {
    let x = tag_cl("abcd".as_bytes());
    let r = x("abcdabcdefgh".as_bytes());
    assert_eq!(r, Done("abcdefgh".as_bytes(), "abcd".as_bytes()));
    let r2 = x("abcefgh".as_bytes());
    assert_eq!(r2, Error(0));
  }

  #[test]
  fn character() {
    let empty = "".as_bytes();
    let a = "abcd".as_bytes();
    let b = "1234".as_bytes();
    let c = "a123".as_bytes();
    let d = "azé12".as_bytes();
    let e = " ".as_bytes();
    assert_eq!(alpha(a), Done(empty, a));
    assert_eq!(alpha(b), Done(b, empty));
    assert_eq!(alpha(c), Done(&c[1..], "a".as_bytes()));
    assert_eq!(alpha(d), Done("é12".as_bytes(), "az".as_bytes()));
    assert_eq!(digit(a), Done(a, empty));
    assert_eq!(digit(b), Done(empty, b));
    assert_eq!(digit(c), Done(c, empty));
    assert_eq!(digit(d), Done(d, empty));
    assert_eq!(alphanumeric(a), Done(empty, a));
    assert_eq!(alphanumeric(b), Done(empty, b));
    assert_eq!(alphanumeric(c), Done(empty, c));
    assert_eq!(alphanumeric(d), Done("é12".as_bytes(), "az".as_bytes()));
    assert_eq!(space(e), Done("".as_bytes(), " ".as_bytes()));
  }

  #[test]
  fn is_not() {
    let a = "ab12cd\nefgh".as_bytes();
    assert_eq!(not_line_ending(a), Done("\nefgh".as_bytes(), "ab12cd".as_bytes()));
    let b = "ab12cd\nefgh\nijkl".as_bytes();
    assert_eq!(not_line_ending(b), Done("\nefgh\nijkl".as_bytes(), "ab12cd".as_bytes()));
  }

  #[test]
  fn buffer_with_size() {
    let i:Vec<u8> = vec![7,8];
    let o:Vec<u8> = vec![4,5,6];
    let arr:[u8; 6] = [3, 4, 5, 6, 7, 8];
    let res = sized_buffer(&arr[..]);
    assert_eq!(res, Done(&i[..], &o[..]))
  }

  /*#[test]
  fn t1() {
    let v1:Vec<u8> = vec![1,2,3];
    let v2:Vec<u8> = vec![4,5,6];
    let d = Done(&v1[], &v2[]);
    let res = d.flat_map(print);
    assert_eq!(res, Done(&v2[], ()));
  }*/

  #[derive(PartialEq,Eq,Debug)]
  struct B {
    a: u8,
    b: u8
  }

  #[test]
  fn chain_and_ignore() {
    tag!(x "abcd".as_bytes());
    tag!(y "efgh".as_bytes());
    fn ret_int(i:&[u8]) -> IResult<&[u8], u8> { Done(i,1) }
    //o!(z<&[u8], int> x S x S retInt Z y);
    o!(z<&[u8], u8> x ~ x ~ [ ret_int ] ~ y);
    let r = z("abcdabcdefgh".as_bytes());
    assert_eq!(r, Done("".as_bytes(), 1));
  }

  #[test]
  fn chain() {
    tag!(x "abcd".as_bytes());
    fn temp_ret_int1(i:&[u8]) -> IResult<&[u8], u8> { Done(i,1) }
    o!(ret_int1<&[u8],u8> x ~ [ temp_ret_int1 ]);
    fn ret_int2(i:&[u8]) -> IResult<&[u8], u8> { Done(i,2) }
    chain!(f<&[u8],B>, ||{B{a: aa, b: bb}}, aa: ret_int1 ~ bb: ret_int2);
    let r = f("abcde".as_bytes());
    assert_eq!(r, Done("e".as_bytes(), B{a: 1, b: 2}));
  }

  #[test]
  fn chain2() {
    tag!(x "abcd".as_bytes());
    tag!(y "efgh".as_bytes());
    fn ret_int1(i:&[u8]) -> IResult<&[u8], u8> { Done(i,1) }
    fn ret_int2(i:&[u8]) -> IResult<&[u8], u8> { Done(i,2) }
    chain!(f<&[u8],B>, ||{B{a: aa, b: bb}}, x ~ x ~ aa: ret_int1 ~ y ~ bb: ret_int2 ~ y);
    let r = f("abcdabcdefghefghX".as_bytes());
    assert_eq!(r, Done("X".as_bytes(), B{a: 1, b: 2}));
  }

  #[test]
  fn alt() {
    fn work(input: &[u8]) -> IResult<&[u8],&[u8]> {
      Done("".as_bytes(), input)
    }
    #[allow(unused_variables)]
    fn dont_work(input: &[u8]) -> IResult<&[u8],&[u8]> {
      Error(3)
    }
    fn work2(input: &[u8]) -> IResult<&[u8],&[u8]> {
      Done(input, "".as_bytes())
    }
    alt!(alt1<&[u8],&[u8]>, dont_work | dont_work);
    alt!(alt2<&[u8],&[u8]>, dont_work | work);
    alt!(alt3<&[u8],&[u8]>, dont_work | dont_work | work2 | dont_work);
    let a = "abcd".as_bytes();
    assert_eq!(alt1(a), Error(1));
    assert_eq!(alt2(a), Done("".as_bytes(), a));
    assert_eq!(alt3(a), Done(a, "".as_bytes()));
  }

  #[test]
  fn opt() {
    tag!(x "abcd".as_bytes());
    opt!(o<&[u8],&[u8]> x);
    let a = "abcdef".as_bytes();
    let b = "bcdefg".as_bytes();
    assert_eq!(o(a), Done("ef".as_bytes(), Some("abcd".as_bytes())));
    assert_eq!(o(b), Done("bcdefg".as_bytes(), None));
  }

  #[test]
  fn many0() {
    tag!(x "abcd".as_bytes());
    many0!(multi<&[u8],&[u8]> x);
    let a = "abcdef".as_bytes();
    let b = "abcdabcdef".as_bytes();
    let c = "azerty".as_bytes();
    let res1 = vec!["abcd".as_bytes()];
    assert_eq!(multi(a), Done("ef".as_bytes(), res1));
    let res2 = vec!["abcd".as_bytes(), "abcd".as_bytes()];
    assert_eq!(multi(b), Done("ef".as_bytes(), res2));
    assert_eq!(multi(c), Done("azerty".as_bytes(), Vec::new()));
  }

  #[test]
  fn many1() {
    tag!(x "abcd".as_bytes());
    many1!(multi<&[u8],&[u8]> x);
    let a = "abcdef".as_bytes();
    let b = "abcdabcdef".as_bytes();
    let c = "azerty".as_bytes();
    let res1 = vec!["abcd".as_bytes()];
    assert_eq!(multi(a), Done("ef".as_bytes(), res1));
    let res2 = vec!["abcd".as_bytes(), "abcd".as_bytes()];
    assert_eq!(multi(b), Done("ef".as_bytes(), res2));
    assert_eq!(multi(c), Error(0));
  }

  #[test]
  fn length_value_test() {
    let i1 = vec![7,8];
    let o1 = vec![4, 5, 6];
    let arr1:[u8; 6] = [3, 4, 5, 6, 7, 8];
    let res1 = length_value(&arr1);
    assert_eq!(Done(&i1[..], &o1[..]), res1);
    let i2:Vec<u8> = vec![4,5,6,7,8];
    let o2 = "";
    let arr2:[u8; 6] = [0, 4, 5, 6, 7, 8];
    let res2 = length_value(&arr2);
    assert_eq!(Done(&i2[..], o2.as_bytes()), res2);
    let arr3:[u8; 7] = [8, 4, 5, 6, 7, 8, 9];
    let res3 = length_value(&arr3);
    //FIXME: should be incomplete
    assert_eq!(Error(0), res3);
  }

  #[test]
  fn take_until_test() {
    take_until!(x "efgh".as_bytes());
    let r = x("abcdabcdefghijkl".as_bytes());
    assert_eq!(r, Done("ijkl".as_bytes(), "abcdabcd".as_bytes()));
    let r2 = x("abcdabcdefgh".as_bytes());
    assert_eq!(r2, Done("".as_bytes(), "abcdabcd".as_bytes()));
    let r3 = x("abcefg".as_bytes());
    assert_eq!(r3, Incomplete(0));
  }
}
document the new chaining macro
//! Useful parser combinators
//!
//! A number of useful parser combinators have already been implemented.
//! Some of them use macros, other are implemented through functions.
//! Hopefully, the syntax will converge to only one way in the future,
//! but the macros system makes no promises.
//!
extern crate collections;
use std::fmt::Debug;
use internal::*;
use internal::IResult::*;
/// declares a byte array as a suite to recognize
///
/// consumes the recognized characters
///
/// ```ignore
///  tag!(x "abcd".as_bytes());
///  let r = Done((), "abcdabcdefgh".as_bytes()).flat_map(x);
///  assert_eq!(r, Done("efgh".as_bytes(), "abcd".as_bytes()));
/// ```
#[macro_export]
macro_rules! tag(
  ($name:ident $inp:expr) => (
    fn $name(i:&[u8]) -> IResult<&[u8], &[u8]>{
      // match only at the start of the input, consuming exactly $inp.len() bytes
      if i.len() >= $inp.len() && i.slice(0, $inp.len()) == $inp {
        Done(&i[$inp.len()..], &i[0..$inp.len()])
      } else {
        Error(0)
      }
    }
  )
);
/// Closure form of `tag!`: returns a boxed closure that recognizes the byte
/// sequence `rec` at the start of its input and consumes it.
pub fn tag_cl<'a,'b>(rec:&'a[u8]) -> Box<Fn(&'b[u8]) -> IResult<&'b[u8], &'b[u8]>> {
  Box::new(move |i: &'b[u8]| -> IResult<&'b[u8], &'b[u8]> {
    if i.len() >= rec.len() && i.slice(0, rec.len()) == rec {
      Done(&i[rec.len()..], &i[0..rec.len()])
    } else {
      Error(0)
    }
  })
}
/// chains parsers and returns the result of only one of them
///
/// The parser written between square brackets is the one whose output is
/// kept; every other parser in the chain only consumes input.
///
/// ```ignore
///  tag!(x "abcd".as_bytes());
///  tag!(y "efgh".as_bytes());
///
///  fn ret_int(i:&[u8]) -> IResult<&[u8], u8> { Done(i,1) };
///
///  // parse the x tag two times, return an int, parse the y tag
///  o!(z<&[u8], u8> x ~ x ~ [ ret_int ] ~ y);
///
///  let r = Done((), "abcdabcdefgh".as_bytes()).flat_map(z);
///  assert_eq!(r, Done("".as_bytes(), 1));
/// ```
#[macro_export]
macro_rules! o(
  ($name:ident<$i:ty,$o:ty> $f1:ident ~ $($rest:tt)*) => (
    #[allow(unused_variables)]
    fn $name(input:$i) -> IResult<$i, $o>{
      // run the first parser, then delegate the rest of the chain to o_parser!
      match $f1(input) {
        IResult::Error(e) => IResult::Error(e),
        IResult::Incomplete(i) => IResult::Incomplete(i),
        IResult::Done(i,o) => {
          o_parser!(i ~ o ~ $($rest)*)
        }
      }
    }
  );
);
/// Recursive helper for `o!`: each arm threads the current input `$i` and
/// the saved output `$o` through the remaining chain of parsers.
#[macro_export]
macro_rules! o_parser(
  // `[ e ]` then a final parser `s`: keep e's output, run s only for effect
  ($i:ident ~ $o:ident ~ [ $e:ident ] ~ $s:ident) => (
    match $e($i) {
      IResult::Error(e) => IResult::Error(e),
      IResult::Incomplete(i) => IResult::Incomplete(i),
      IResult::Done(i,o) => {
        match $s(i) {
          IResult::Error(e) => IResult::Error(e),
          IResult::Incomplete(i2) => IResult::Incomplete(i2),
          IResult::Done(i2,o2) => {
            // discard o2; the bracketed parser's output wins
            IResult::Done(i2, o)
          }
        }
      }
    }
  );
  // `[ e ]` with more parsers after: keep e's output, recurse on the rest
  ($i:ident ~ $o:ident ~ [ $e:ident ] ~ $($rest:tt)*) => (
    match $e($i) {
      IResult::Error(e) => IResult::Error(e),
      IResult::Incomplete(i) => IResult::Incomplete(i),
      IResult::Done(i,o) => {
        o_parser!(i ~ o ~ $($rest)*)
      }
    }
  );
  // `[ e ]` at the end of the chain: its result is the chain's result
  ($i:ident ~ $o:ident ~ [ $e:ident ]) => (
    $e($i)
  );
  // plain parser with more after: consume input, ignore its output
  ($i:ident ~ $o:ident ~ $e:ident ~ $($rest:tt)*) => (
    match $e($i) {
      IResult::Error(e) => IResult::Error(e),
      IResult::Incomplete(i) => IResult::Incomplete(i),
      IResult::Done(i,_) => {
        o_parser!(i ~ $o ~ $($rest)*)
      }
    }
  );
  // plain parser at the end: consume input, return the saved output
  ($i:ident ~ $o:ident ~ $e:ident) => (
    match $e($i) {
      IResult::Error(e) => IResult::Error(e),
      IResult::Incomplete(i) => IResult::Incomplete(i),
      IResult::Done(i,_) => {
        IResult::Done(i, $o)
      }
    }
  );
  // base case: nothing left to run
  ($i:ident ~ $o:ident) => (Done($i,$o));
);
/// chains parsers and assembles the results through a closure
///
/// Steps written `field: parser` bind the parser's output to `field`;
/// `$assemble` is a zero-argument closure reading those bindings.
///
/// ```ignore
/// #[derive(PartialEq,Eq,Debug)]
/// struct B {
///   a: u8,
///   b: u8
/// }
///
/// tag!(x "abcd".as_bytes());
/// tag!(y "efgh".as_bytes());
///
/// fn ret_int(i:&[u8]) -> IResult<&[u8], u8> { Done(i,1) };
///
/// chain!(z<&[u8], B>, ||{B{a: aa, b: bb}},
///   x            ~
///   x            ~
///   aa: ret_int  ~
///   y            ~
///   bb: ret_int
/// );
///
/// let r = z("abcdabcdefgh".as_bytes());
/// assert_eq!(r, Done("".as_bytes(), B{a: 1, b: 1}));
/// ```
#[macro_export]
macro_rules! chain (
  ($name:ident<$i:ty,$o:ty>, $assemble:expr, $($rest:tt)*) => (
    #[allow(unused_variables)]
    fn $name(i:$i) -> IResult<$i,$o>{
      chaining_parser!(i, $assemble, $($rest)*)
    }
  );
);
/// Recursive helper for `chain!`: runs each step, binding `field:` outputs
/// as local variables, and finally calls `$assemble()` on success.
#[macro_export]
macro_rules! chaining_parser (
  // unbound step with more to come: consume input, discard output
  ($i:expr, $assemble:expr, $e:ident ~ $($rest:tt)*) => (
    match $e($i) {
      IResult::Error(e) => IResult::Error(e),
      IResult::Incomplete(i) => IResult::Incomplete(i),
      IResult::Done(i,_) => {
        chaining_parser!(i, $assemble, $($rest)*)
      }
    }
  );
  // `field: parser` with more to come: bind the output for $assemble
  ($i:expr, $assemble:expr, $field:ident : $e:ident ~ $($rest:tt)*) => (
    match $e($i) {
      IResult::Error(e) => IResult::Error(e),
      IResult::Incomplete(i) => IResult::Incomplete(i),
      IResult::Done(i,o) => {
        let $field = o;
        chaining_parser!(i, $assemble, $($rest)*)
      }
    }
  );
  // final `field: parser`: bind, then build the result
  ($i:expr, $assemble:expr, $field:ident : $e:ident) => (
    match $e($i) {
      IResult::Error(e) => IResult::Error(e),
      IResult::Incomplete(i) => IResult::Incomplete(i),
      IResult::Done(i,o) => {
        let $field = o;
        IResult::Done(i, $assemble())
      }
    }
  );
  // final unbound parser: consume, then build the result
  ($i:expr, $assemble:expr, $e: ident) => (
    match $e($i) {
      IResult::Error(e) => IResult::Error(e),
      IResult::Incomplete(i) => IResult::Incomplete(i),
      IResult::Done(i,_) => {
        IResult::Done(i, $assemble())
      }
    }
  );
  // empty chain: just build the result
  ($i:expr, $assemble:expr,) => (
    IResult::Done($i, $assemble())
  )
);
/// `alt!(name<I,O>, a | b | c)` generates a parser trying each alternative
/// in order and returning the first `Done`.
#[macro_export]
macro_rules! alt (
  ($name:ident<$i:ty,$o:ty>, $($rest:tt)*) => (
    fn $name(i:$i) -> IResult<$i,$o>{
      alt_parser!(i | $($rest)*)
    }
  );
);
/// Recursive helper for `alt!`. Note that `Incomplete` is treated like a
/// failure here: the next alternative is tried. Exhausting all alternatives
/// yields `Error(1)`.
#[macro_export]
macro_rules! alt_parser (
  ($i:ident | $e:ident | $($rest:tt)*) => (
    match $e($i) {
      IResult::Error(_) => alt_parser!($i | $($rest)*),
      IResult::Incomplete(_) => alt_parser!($i | $($rest)*),
      IResult::Done(i,o) => IResult::Done(i,o)
    }
  );
  ($i:ident | $e:ident) => (
    match $e($i) {
      IResult::Error(_) => alt_parser!($i),
      IResult::Incomplete(_) => alt_parser!($i),
      IResult::Done(i,o) => IResult::Done(i,o)
    }
  );
  // no alternative matched
  ($i:ident) => (
    IResult::Error(1)
  )
);
/// Debug combinator: prints its input with `{:?}` and passes it through
/// unconsumed (output is `()`).
pub fn print<'y,T: Debug>(input: T) -> IResult<'y,T, ()> {
  println!("{:?}", input);
  Done(input, ())
}
/// Entry-point combinator: lifts a byte slice into an `IResult` so a parser
/// chain can be started with `flat_map`.
pub fn begin<'a,'y>(input: &'a [u8]) -> IResult<'y,(), &'a [u8]> {
  Done((), input)
}
/// `is_not!(name arr)` generates a parser consuming bytes until it meets any
/// byte contained in `$arr`; the consumed region is the output.
///
/// Fixes: `range(0, n)` was removed from the standard library before
/// Rust 1.0 — use `0..n` like the sibling `filter!` macro; likewise replace
/// the removed `slice_from`/`slice` methods with index slicing.
#[macro_export]
macro_rules! is_not(
  ($name:ident $arr:expr) => (
    fn $name(input:&[u8]) -> IResult<&[u8], &[u8]> {
      for idx in 0..input.len() {
        for &i in $arr.iter() {
          if input[idx] == i {
            return IResult::Done(&input[idx..], &input[0..idx])
          }
        }
      }
      // no forbidden byte found: consume everything
      IResult::Done("".as_bytes(), input)
    }
  )
);
/// `is_a!(name arr)` generates a parser consuming bytes as long as each byte
/// is contained in `$arr` (the complement of `is_not!`).
///
/// Fixes: `var res = false` is not Rust (use `let mut`), statements were
/// missing semicolons, and `range(0, n)` / `slice*` are pre-1.0 APIs.
#[macro_export]
macro_rules! is_a(
  ($name:ident $arr:expr) => (
    fn $name(input:&[u8]) -> IResult<&[u8], &[u8]> {
      for idx in 0..input.len() {
        // does the current byte belong to the accepted set?
        let mut res = false;
        for &i in $arr.iter() {
          if input[idx] == i {
            res = true;
            break;
          }
        }
        if !res {
          return IResult::Done(&input[idx..], &input[0..idx])
        }
      }
      // every byte was accepted: consume everything
      IResult::Done("".as_bytes(), input)
    }
  )
);
/// `filter!(name f)` generates a parser consuming bytes while the predicate
/// `f(byte)` holds; the matched prefix is the output.
#[macro_export]
macro_rules! filter(
  ($name:ident $f:ident) => (
    fn $name(input:&[u8]) -> IResult<&[u8], &[u8]> {
      for idx in 0..input.len() {
        if !$f(input[idx]) {
          return IResult::Done(&input[idx..], &input[0..idx])
        }
      }
      // every byte satisfied the predicate: consume everything
      IResult::Done("".as_bytes(), input)
    }
  )
);
// FIXME: when rust-lang/rust#17436 is fixed, macros will be able to export
// public methods
//pub is_not!(line_ending "\r\n".as_bytes())
/// Consumes bytes until a `\r` or `\n` is found; the line ending itself is
/// left in the remaining input.
pub fn not_line_ending(input:&[u8]) -> IResult<&[u8], &[u8]> {
  for idx in 0..input.len() {
    for &i in "\r\n".as_bytes().iter() {
      if input[idx] == i {
        return Done(&input[idx..], &input[0..idx])
      }
    }
  }
  // no line ending in the input: consume everything
  Done("".as_bytes(), input)
}
tag!(tag_ln "\n".as_bytes());
/// Recognizes a single `\n` (wraps the private `tag_ln` parser).
pub fn line_ending(input:&[u8]) -> IResult<&[u8], &[u8]> {
  tag_ln(input)
}
/// true for ASCII letters `A`-`Z` and `a`-`z`
pub fn is_alphabetic(chr:u8) -> bool {
  let upper = chr >= 0x41 && chr <= 0x5A;
  let lower = chr >= 0x61 && chr <= 0x7A;
  upper || lower
}
/// true for ASCII digits `0`-`9`
pub fn is_digit(chr: u8) -> bool {
  let in_range = chr >= 0x30 && chr <= 0x39;
  in_range
}
/// true for ASCII letters or digits
pub fn is_alphanumeric(chr: u8) -> bool {
  if is_alphabetic(chr) { true } else { is_digit(chr) }
}
/// true for a space or a horizontal tab
pub fn is_space(chr:u8) -> bool {
  if chr == ' ' as u8 { true } else { chr == '\t' as u8 }
}
// FIXME: when rust-lang/rust#17436 is fixed, macros will be able to export
//pub filter!(alpha is_alphabetic)
//pub filter!(digit is_digit)
//pub filter!(alphanumeric is_alphanumeric)
/// consumes a (possibly empty) run of ASCII letters
pub fn alpha(input:&[u8]) -> IResult<&[u8], &[u8]> {
  for idx in 0..input.len() {
    if !is_alphabetic(input[idx]) {
      return Done(&input[idx..], &input[0..idx])
    }
  }
  // every byte matched: consume all of the input
  Done("".as_bytes(), input)
}
/// consumes a (possibly empty) run of ASCII digits
pub fn digit(input:&[u8]) -> IResult<&[u8], &[u8]> {
  for idx in 0..input.len() {
    if !is_digit(input[idx]) {
      return Done(&input[idx..], &input[0..idx])
    }
  }
  Done("".as_bytes(), input)
}
/// consumes a (possibly empty) run of ASCII letters or digits
pub fn alphanumeric(input:&[u8]) -> IResult<&[u8], &[u8]> {
  for idx in 0..input.len() {
    if !is_alphanumeric(input[idx]) {
      return Done(&input[idx..], &input[0..idx])
    }
  }
  Done("".as_bytes(), input)
}
/// consumes a (possibly empty) run of spaces and tabs
pub fn space(input:&[u8]) -> IResult<&[u8], &[u8]> {
  for idx in 0..input.len() {
    if !is_space(input[idx]) {
      return Done(&input[idx..], &input[0..idx])
    }
  }
  Done("".as_bytes(), input)
}
/// consumes a (possibly empty) run of spaces, tabs, `\r` and `\n`
pub fn multispace(input:&[u8]) -> IResult<&[u8], &[u8]> {
  for idx in 0..input.len() {
    if !is_space(input[idx]) && input[idx] != '\r' as u8 && input[idx] != '\n' as u8 {
      return Done(&input[idx..], &input[0..idx])
    }
  }
  Done("".as_bytes(), input)
}
/// Like `length_value`, but returns `Incomplete(0)` instead of an error when
/// the input is empty or shorter than the announced length.
pub fn sized_buffer(input:&[u8]) -> IResult<&[u8], &[u8]> {
  if input.len() == 0 {
    return Incomplete(0)
  }
  // first byte is the payload length
  let len = input[0] as usize;
  if input.len() >= len + 1 {
    return Done(&input[len+1..], &input[1..len+1])
  } else {
    return Incomplete(0)
  }
}
/// `opt!(name<I,O> f)` generates a parser that always succeeds: `f`'s output
/// is wrapped in `Some`, and `None` is returned (consuming nothing) when `f`
/// fails.
#[macro_export]
macro_rules! opt(
  ($name:ident<$i:ty,$o:ty> $f:ident) => (
    fn $name(input:$i) -> IResult<$i, Option<$o>> {
      match $f(input) {
        IResult::Done(i,o) => IResult::Done(i, Some(o)),
        _ => IResult::Done(input, None)
      }
    }
  )
);
// 0 or more
/// `many0!(name<I,O> f)` applies `f` repeatedly, collecting outputs into a
/// `Vec`; never fails (an immediate mismatch yields an empty Vec).
#[macro_export]
macro_rules! many0(
  ($name:ident<$i:ty,$o:ty> $f:ident) => (
    fn $name(input:$i) -> IResult<$i,Vec<$o>> {
      // `begin` = bytes consumed so far; `remaining` tracks the previous
      // iteration's leftover length to compute the child's consumption.
      let mut begin = 0;
      let mut remaining = input.len();
      let mut res: Vec<$o> = Vec::new();
      loop {
        match $f(input.slice_from(begin)) {
          IResult::Done(i,o) => {
            res.push(o);
            begin += remaining - i.len();
            remaining = i.len();
            if begin >= input.len() {
              return IResult::Done(i, res)
            }
          },
          _ => {
            return IResult::Done(input.slice_from(begin), res)
          }
        }
      }
    }
  )
);
// one or more
/// `many1!(name<I,O> f)` is like `many0!` but returns `Error(0)` when `f`
/// never matches at all.
#[macro_export]
macro_rules! many1(
  ($name:ident<$i:ty,$o:ty> $f:ident) => (
    fn $name(input:$i) -> IResult<$i,Vec<$o>> {
      let mut begin = 0;
      let mut remaining = input.len();
      let mut res: Vec<$o> = Vec::new();
      loop {
        match $f(input.slice_from(begin)) {
          IResult::Done(i,o) => {
            res.push(o);
            begin += remaining - i.len();
            remaining = i.len();
            if begin >= input.len() {
              return IResult::Done(i, res)
            }
          },
          _ => {
            if begin == 0 {
              // the child never matched: one-or-more fails
              return IResult::Error(0)
            } else {
              return IResult::Done(input.slice_from(begin), res)
            }
          }
        }
      }
    }
  )
);
/// `fold0!(name<I,O>, assemble, f)` folds zero or more outputs of `f` into an
/// accumulator (initial value `z`) using `assemble`.
#[macro_export]
macro_rules! fold0(
  ($name:ident<$i:ty,$o:ty>, $assemble:expr, $f:ident) => (
    fn $name(input:$i, z:$o) -> IResult<$i,$o> {
      fold0_impl!(<$i, $o>, $assemble, $f, input, z);
    }
  );
);
/// Shared body for `fold0!`: loops `$f`, folding outputs into `res`; the
/// first failure of `$f` ends the fold successfully.
#[macro_export]
macro_rules! fold0_impl(
  (<$i:ty,$o:ty>, $assemble:expr, $f:ident, $input:ident, $z:ident) => (
    {
      let mut begin = 0;
      let mut remaining = $input.len();
      let mut res: $o = $z;
      loop {
        match $f($input.slice_from(begin)) {
          IResult::Done(i,o) => {
            res = $assemble(res, o);
            // advance by the number of bytes `$f` consumed
            begin += remaining - i.len();
            remaining = i.len();
            if begin >= $input.len() {
              return IResult::Done(i, res)
            }
          },
          _ => {
            return IResult::Done($input.slice_from(begin), res)
          }
        }
      }
    }
  );
);
/// `fold1!` is `fold0!` with one-or-more semantics: failure before anything
/// is consumed yields `Error(0)`.
#[macro_export]
macro_rules! fold1(
  ($name:ident<$i:ty,$o:ty>, $assemble:expr, $f:ident) => (
    fn $name(input:$i, z:$o) -> IResult<$i,$o> {
      fold1_impl!(<$i, $o>, $assemble, $f, input, z);
    }
  );
);
/// Shared body for `fold1!`.
#[macro_export]
macro_rules! fold1_impl(
  (<$i:ty,$o:ty>, $assemble:expr, $f:ident, $input:ident, $z:ident) => (
    {
      let mut begin = 0;
      let mut remaining = $input.len();
      let mut res: $o = $z;
      loop {
        match $f($input.slice_from(begin)) {
          IResult::Done(i,o) => {
            res = $assemble(res, o);
            begin += remaining - i.len();
            remaining = i.len();
            if begin >= $input.len() {
              return IResult::Done(i, res)
            }
          },
          _ => {
            if begin == 0 {
              // `$f` never matched: one-or-more fold fails
              return IResult::Error(0)
            } else {
              return IResult::Done($input.slice_from(begin), res)
            }
          }
        }
      }
    }
  );
);
/// Reads a one-byte length prefix, then returns that many following bytes;
/// the remaining input is whatever comes after the value.
pub fn length_value(input:&[u8]) -> IResult<&[u8], &[u8]> {
  let input_len = input.len();
  if input_len == 0 {
    return IResult::Error(0)
  }
  // first byte is the payload length
  let len = input[0] as usize;
  if input_len - 1 >= len {
    return IResult::Done(&input[len+1..], &input[1..len+1])
  } else {
    // FIXME: return Incomplete
    return IResult::Error(0)
  }
}
/// `take!(name count)` consumes exactly `count` bytes, or reports
/// `Incomplete(0)` when fewer are available.
#[macro_export]
macro_rules! take(
  ($name:ident $count:expr) => (
    fn $name(i:&[u8]) -> IResult<&[u8], &[u8]>{
      if i.len() < $count {
        Incomplete(0)
      } else {
        Done(&i[$count..],&i[0..$count])
      }
    }
  )
);
/// `take_until!(name inp)` generates a parser that consumes everything up to
/// and including the first occurrence of `inp`, returning the bytes before it.
/// Returns `Incomplete(0)` once `inp` can no longer fit in the remaining
/// input, and `Error(0)` on empty input.
///
/// Fixes: removed leftover debug `println!`s (a library macro must not write
/// to stdout) and the unreachable `idx + $inp.len() > i.len()` branch inside
/// the match (the loop-top guard already excludes that case).
#[macro_export]
macro_rules! take_until(
  ($name:ident $inp:expr) => (
    fn $name(i:&[u8]) -> IResult<&[u8], &[u8]>{
      for idx in 0..i.len() {
        // not enough bytes left for a full match: need more input
        if idx + $inp.len() > i.len() {
          return Incomplete(0)
        }
        if &i[idx..idx + $inp.len()] == $inp {
          // skip the delimiter; the guard above keeps this slice in bounds
          return Done(&i[(idx + $inp.len())..], &i[0..idx])
        }
      }
      return Error(0)
    }
  )
);
/// Like `take_until!`, but leaves the matched `inp` in the remaining input
/// instead of consuming it.
///
/// Fixes: removed leftover debug `println!`s from the generated parser.
#[macro_export]
macro_rules! take_until_and_leave(
  ($name:ident $inp:expr) => (
    fn $name(i:&[u8]) -> IResult<&[u8], &[u8]>{
      for idx in 0..i.len() {
        if idx + $inp.len() > i.len() {
          return Incomplete(0)
        }
        if &i[idx..idx + $inp.len()] == $inp {
          // keep the delimiter in the unconsumed part
          return Done(&i[idx..], &i[0..idx])
        }
      }
      return Error(0)
    }
  )
);
/// Consumes input up to and including the first byte that appears in `inp`
/// (a set of alternative single bytes), returning the bytes before it.
///
/// Fixes: removed debug `println!`s and two unreachable `idx + 1 > i.len()`
/// checks — `idx < i.len()` inside the loop, so `idx + 1 <= i.len()` always.
#[macro_export]
macro_rules! take_until_either(
  ($name:ident $inp:expr) => (
    fn $name(i:&[u8]) -> IResult<&[u8], &[u8]>{
      for idx in 0..i.len() {
        for &t in $inp.iter() {
          if i[idx] == t {
            // skip the matched byte; idx+1 is a valid slice bound here
            return Done(&i[(idx+1)..], &i[0..idx])
          }
        }
      }
      return Error(0)
    }
  )
);
/// Like `take_until_either!`, but leaves the matched byte in the remaining
/// input instead of consuming it.
///
/// Fixes: removed debug `println!`s and the unreachable `idx + 1 > i.len()`
/// guard (always false while `idx < i.len()`).
#[macro_export]
macro_rules! take_until_either_and_leave(
  ($name:ident $inp:expr) => (
    fn $name(i:&[u8]) -> IResult<&[u8], &[u8]>{
      for idx in 0..i.len() {
        for &t in $inp.iter() {
          if i[idx] == t {
            return Done(&i[idx..], &i[0..idx])
          }
        }
      }
      return Error(0)
    }
  )
);
/// Unit tests for the combinator macros and functions above.
///
/// Fixes: `[u8; 6us]` used the `us` integer suffix and `&arr[]` used the
/// empty-range slicing form — both removed before Rust 1.0; replaced with
/// plain lengths and `[..]`. Also dropped stray `;` after item fns and
/// debug printlns.
#[cfg(test)]
mod tests {
  use super::*;
  use map::*;
  use internal::IResult;
  use internal::IResult::*;

  #[test]
  fn tag_closure() {
    let x = tag_cl("abcd".as_bytes());
    let r = x("abcdabcdefgh".as_bytes());
    assert_eq!(r, Done("abcdefgh".as_bytes(), "abcd".as_bytes()));
    let r2 = x("abcefgh".as_bytes());
    assert_eq!(r2, Error(0));
  }

  #[test]
  fn character() {
    let empty = "".as_bytes();
    let a = "abcd".as_bytes();
    let b = "1234".as_bytes();
    let c = "a123".as_bytes();
    let d = "azé12".as_bytes();
    let e = " ".as_bytes();
    assert_eq!(alpha(a), Done(empty, a));
    assert_eq!(alpha(b), Done(b, empty));
    assert_eq!(alpha(c), Done(&c[1..], "a".as_bytes()));
    assert_eq!(alpha(d), Done("é12".as_bytes(), "az".as_bytes()));
    assert_eq!(digit(a), Done(a, empty));
    assert_eq!(digit(b), Done(empty, b));
    assert_eq!(digit(c), Done(c, empty));
    assert_eq!(digit(d), Done(d, empty));
    assert_eq!(alphanumeric(a), Done(empty, a));
    assert_eq!(alphanumeric(b), Done(empty, b));
    assert_eq!(alphanumeric(c), Done(empty, c));
    assert_eq!(alphanumeric(d), Done("é12".as_bytes(), "az".as_bytes()));
    assert_eq!(space(e), Done("".as_bytes(), " ".as_bytes()));
  }

  #[test]
  fn is_not() {
    let a = "ab12cd\nefgh".as_bytes();
    assert_eq!(not_line_ending(a), Done("\nefgh".as_bytes(), "ab12cd".as_bytes()));
    let b = "ab12cd\nefgh\nijkl".as_bytes();
    assert_eq!(not_line_ending(b), Done("\nefgh\nijkl".as_bytes(), "ab12cd".as_bytes()));
  }

  #[test]
  fn buffer_with_size() {
    let i:Vec<u8> = vec![7,8];
    let o:Vec<u8> = vec![4,5,6];
    let arr:[u8; 6] = [3, 4, 5, 6, 7, 8];
    let res = sized_buffer(&arr[..]);
    assert_eq!(res, Done(&i[..], &o[..]))
  }

  /*#[test]
  fn t1() {
    let v1:Vec<u8> = vec![1,2,3];
    let v2:Vec<u8> = vec![4,5,6];
    let d = Done(&v1[], &v2[]);
    let res = d.flat_map(print);
    assert_eq!(res, Done(&v2[], ()));
  }*/

  #[derive(PartialEq,Eq,Debug)]
  struct B {
    a: u8,
    b: u8
  }

  #[test]
  fn chain_and_ignore() {
    tag!(x "abcd".as_bytes());
    tag!(y "efgh".as_bytes());
    fn ret_int(i:&[u8]) -> IResult<&[u8], u8> { Done(i,1) }
    //o!(z<&[u8], int> x S x S retInt Z y);
    o!(z<&[u8], u8> x ~ x ~ [ ret_int ] ~ y);
    let r = z("abcdabcdefgh".as_bytes());
    assert_eq!(r, Done("".as_bytes(), 1));
  }

  #[test]
  fn chain() {
    tag!(x "abcd".as_bytes());
    fn temp_ret_int1(i:&[u8]) -> IResult<&[u8], u8> { Done(i,1) }
    o!(ret_int1<&[u8],u8> x ~ [ temp_ret_int1 ]);
    fn ret_int2(i:&[u8]) -> IResult<&[u8], u8> { Done(i,2) }
    chain!(f<&[u8],B>, ||{B{a: aa, b: bb}}, aa: ret_int1 ~ bb: ret_int2);
    let r = f("abcde".as_bytes());
    assert_eq!(r, Done("e".as_bytes(), B{a: 1, b: 2}));
  }

  #[test]
  fn chain2() {
    tag!(x "abcd".as_bytes());
    tag!(y "efgh".as_bytes());
    fn ret_int1(i:&[u8]) -> IResult<&[u8], u8> { Done(i,1) }
    fn ret_int2(i:&[u8]) -> IResult<&[u8], u8> { Done(i,2) }
    chain!(f<&[u8],B>, ||{B{a: aa, b: bb}}, x ~ x ~ aa: ret_int1 ~ y ~ bb: ret_int2 ~ y);
    let r = f("abcdabcdefghefghX".as_bytes());
    assert_eq!(r, Done("X".as_bytes(), B{a: 1, b: 2}));
  }

  #[test]
  fn alt() {
    fn work(input: &[u8]) -> IResult<&[u8],&[u8]> {
      Done("".as_bytes(), input)
    }
    #[allow(unused_variables)]
    fn dont_work(input: &[u8]) -> IResult<&[u8],&[u8]> {
      Error(3)
    }
    fn work2(input: &[u8]) -> IResult<&[u8],&[u8]> {
      Done(input, "".as_bytes())
    }
    alt!(alt1<&[u8],&[u8]>, dont_work | dont_work);
    alt!(alt2<&[u8],&[u8]>, dont_work | work);
    alt!(alt3<&[u8],&[u8]>, dont_work | dont_work | work2 | dont_work);
    let a = "abcd".as_bytes();
    assert_eq!(alt1(a), Error(1));
    assert_eq!(alt2(a), Done("".as_bytes(), a));
    assert_eq!(alt3(a), Done(a, "".as_bytes()));
  }

  #[test]
  fn opt() {
    tag!(x "abcd".as_bytes());
    opt!(o<&[u8],&[u8]> x);
    let a = "abcdef".as_bytes();
    let b = "bcdefg".as_bytes();
    assert_eq!(o(a), Done("ef".as_bytes(), Some("abcd".as_bytes())));
    assert_eq!(o(b), Done("bcdefg".as_bytes(), None));
  }

  #[test]
  fn many0() {
    tag!(x "abcd".as_bytes());
    many0!(multi<&[u8],&[u8]> x);
    let a = "abcdef".as_bytes();
    let b = "abcdabcdef".as_bytes();
    let c = "azerty".as_bytes();
    let res1 = vec!["abcd".as_bytes()];
    assert_eq!(multi(a), Done("ef".as_bytes(), res1));
    let res2 = vec!["abcd".as_bytes(), "abcd".as_bytes()];
    assert_eq!(multi(b), Done("ef".as_bytes(), res2));
    assert_eq!(multi(c), Done("azerty".as_bytes(), Vec::new()));
  }

  #[test]
  fn many1() {
    tag!(x "abcd".as_bytes());
    many1!(multi<&[u8],&[u8]> x);
    let a = "abcdef".as_bytes();
    let b = "abcdabcdef".as_bytes();
    let c = "azerty".as_bytes();
    let res1 = vec!["abcd".as_bytes()];
    assert_eq!(multi(a), Done("ef".as_bytes(), res1));
    let res2 = vec!["abcd".as_bytes(), "abcd".as_bytes()];
    assert_eq!(multi(b), Done("ef".as_bytes(), res2));
    assert_eq!(multi(c), Error(0));
  }

  #[test]
  fn length_value_test() {
    let i1 = vec![7,8];
    let o1 = vec![4, 5, 6];
    let arr1:[u8; 6] = [3, 4, 5, 6, 7, 8];
    let res1 = length_value(&arr1);
    assert_eq!(Done(&i1[..], &o1[..]), res1);
    let i2:Vec<u8> = vec![4,5,6,7,8];
    let o2 = "";
    let arr2:[u8; 6] = [0, 4, 5, 6, 7, 8];
    let res2 = length_value(&arr2);
    assert_eq!(Done(&i2[..], o2.as_bytes()), res2);
    let arr3:[u8; 7] = [8, 4, 5, 6, 7, 8, 9];
    let res3 = length_value(&arr3);
    //FIXME: should be incomplete
    assert_eq!(Error(0), res3);
  }

  #[test]
  fn take_until_test() {
    take_until!(x "efgh".as_bytes());
    let r = x("abcdabcdefghijkl".as_bytes());
    assert_eq!(r, Done("ijkl".as_bytes(), "abcdabcd".as_bytes()));
    let r2 = x("abcdabcdefgh".as_bytes());
    assert_eq!(r2, Done("".as_bytes(), "abcdabcd".as_bytes()));
    let r3 = x("abcefg".as_bytes());
    assert_eq!(r3, Incomplete(0));
  }
}
|
//! Useful parser combinators
//!
//! A number of useful parser combinators have already been implemented.
//! Some of them use macros, other are implemented through functions.
//! Hopefully, the syntax will converge to only one way in the future,
//! but the macros system makes no promises.
//!
#[cfg(feature = "core")]
use std::prelude::v1::*;
use std::boxed::Box;
use std::fmt::Debug;
use internal::*;
use internal::IResult::*;
use internal::Err::*;
use util::ErrorCode;
use std::mem::transmute;
/// Closure form of `tag!`: the returned boxed closure recognizes the byte
/// sequence `rec` at the start of its input and consumes it; failure is
/// reported as a positioned `TagClosure` error.
pub fn tag_cl<'a,'b>(rec:&'a[u8]) -> Box<Fn(&'b[u8]) -> IResult<'b, &'b[u8], &'b[u8]> + 'a> {
  Box::new(move |i: &'b[u8]| -> IResult<'b, &'b[u8], &'b[u8]> {
    if i.len() >= rec.len() && &i[0..rec.len()] == rec {
      Done(&i[rec.len()..], &i[0..rec.len()])
    } else {
      Error(Position(ErrorCode::TagClosure as u32, i))
    }
  })
}
// not compiled for no_std ("core") builds: println! needs std
#[cfg(not(feature = "core"))]
/// Debug combinator: prints its input with `{:?}` and passes it through
/// unconsumed (output is `()`).
pub fn print<'a,T: Debug>(input: T) -> IResult<'a,T, ()> {
  println!("{:?}", input);
  Done(input, ())
}
/// Entry-point combinator: lifts a byte slice into an `IResult` so a parser
/// chain can be started.
pub fn begin<'a>(input: &'a [u8]) -> IResult<'a,(), &'a [u8]> {
  Done((), input)
}
// FIXME: when rust-lang/rust#17436 is fixed, macros will be able to export
// public methods
//pub is_not!(line_ending b"\r\n")
/// Consumes bytes until a `\r` or `\n` is found; the line ending itself is
/// left in the remaining input.
pub fn not_line_ending<'a>(input:&[u8]) -> IResult<'a, &[u8], &[u8]> {
  for idx in 0..input.len() {
    for &i in b"\r\n".iter() {
      if input[idx] == i {
        return Done(&input[idx..], &input[0..idx])
      }
    }
  }
  // no line ending in the input: consume everything
  Done(b"", input)
}
named!(tag_ln, tag!("\n"));
/// Recognizes a single `\n` (wraps the private `tag_ln` parser).
pub fn line_ending<'a>(input:&'a[u8]) -> IResult<'a, &[u8], &[u8]> {
  tag_ln(input)
}
/// true for ASCII letters `A`-`Z` and `a`-`z`
pub fn is_alphabetic(chr:u8) -> bool {
  let upper = chr >= 0x41 && chr <= 0x5A;
  let lower = chr >= 0x61 && chr <= 0x7A;
  upper || lower
}
/// true for ASCII digits `0`-`9`
pub fn is_digit(chr: u8) -> bool {
  let in_range = chr >= 0x30 && chr <= 0x39;
  in_range
}
/// true for ASCII letters or digits
pub fn is_alphanumeric(chr: u8) -> bool {
  if is_alphabetic(chr) { true } else { is_digit(chr) }
}
/// true for a space or a horizontal tab
pub fn is_space(chr:u8) -> bool {
  if chr == ' ' as u8 { true } else { chr == '\t' as u8 }
}
// FIXME: when rust-lang/rust#17436 is fixed, macros will be able to export
//pub filter!(alpha is_alphabetic)
//pub filter!(digit is_digit)
//pub filter!(alphanumeric is_alphanumeric)
/// consumes a non-empty run of ASCII letters; a non-letter first byte is a
/// positioned `Alpha` error (unlike earlier revisions, empty matches fail)
pub fn alpha<'a>(input:&'a [u8]) -> IResult<'a,&'a [u8], &[u8]> {
  for idx in 0..input.len() {
    if !is_alphabetic(input[idx]) {
      if idx == 0 {
        return Error(Position(ErrorCode::Alpha as u32, input))
      } else {
        return Done(&input[idx..], &input[0..idx])
      }
    }
  }
  // every byte matched: consume all of the input
  Done(b"", input)
}
/// consumes a non-empty run of ASCII digits, else a `Digit` error
pub fn digit<'a>(input:&'a [u8]) -> IResult<'a,&'a [u8], &[u8]> {
  for idx in 0..input.len() {
    if !is_digit(input[idx]) {
      if idx == 0 {
        return Error(Position(ErrorCode::Digit as u32, input))
      } else {
        return Done(&input[idx..], &input[0..idx])
      }
    }
  }
  Done(b"", input)
}
/// consumes a non-empty run of letters or digits, else `AlphaNumeric` error
pub fn alphanumeric<'a>(input:&'a [u8]) -> IResult<'a,&'a [u8], &[u8]> {
  for idx in 0..input.len() {
    if !is_alphanumeric(input[idx]) {
      if idx == 0 {
        return Error(Position(ErrorCode::AlphaNumeric as u32, input))
      } else {
        return Done(&input[idx..], &input[0..idx])
      }
    }
  }
  Done(b"", input)
}
/// consumes a non-empty run of spaces and tabs, else a `Space` error
pub fn space<'a>(input:&'a [u8]) -> IResult<'a,&'a [u8], &[u8]> {
  for idx in 0..input.len() {
    if !is_space(input[idx]) {
      if idx == 0 {
        return Error(Position(ErrorCode::Space as u32, input))
      } else {
        return Done(&input[idx..], &input[0..idx])
      }
    }
  }
  Done(b"", input)
}
/// consumes a non-empty run of spaces, tabs, `\r` and `\n`, else a
/// `MultiSpace` error
pub fn multispace<'a>(input:&'a [u8]) -> IResult<'a,&'a [u8], &[u8]> {
  for idx in 0..input.len() {
    // println!("multispace at index: {}", idx);
    if !is_space(input[idx]) && input[idx] != '\r' as u8 && input[idx] != '\n' as u8 {
      if idx == 0 {
        return Error(Position(ErrorCode::MultiSpace as u32, input))
      } else {
        return Done(&input[idx..], &input[0..idx])
      }
    }
  }
  Done(b"", input)
}
/// Reads a one-byte length prefix and returns that many following bytes;
/// reports how much more input is needed when the buffer is too short.
pub fn sized_buffer<'a>(input:&'a [u8]) -> IResult<'a,&'a [u8], &[u8]> {
  if input.len() == 0 {
    // cannot even read the length byte
    return Incomplete(Needed::Unknown)
  }
  let len = input[0] as usize;
  if input.len() >= len + 1 {
    return Done(&input[len+1..], &input[1..len+1])
  } else {
    return Incomplete(Needed::Size(1 + len))
  }
}
/// Like `sized_buffer`, but empty input is a positioned `LengthValueFn`
/// error rather than `Incomplete`.
pub fn length_value<'a>(input:&'a [u8]) -> IResult<'a,&'a [u8], &[u8]> {
  let input_len = input.len();
  if input_len == 0 {
    return Error(Position(ErrorCode::LengthValueFn as u32, input))
  }
  // first byte is the payload length
  let len = input[0] as usize;
  if input_len - 1 >= len {
    return IResult::Done(&input[len+1..], &input[1..len+1])
  } else {
    // FIXME: return Incomplete
    return IResult::Incomplete(Needed::Size(1+len))
  }
}
/// Parses an unsigned 8-bit integer from the first byte of the input.
pub fn be_u8<'a>(i: &[u8]) -> IResult<'a,&[u8], u8> {
if i.len() < 1 {
Incomplete(Needed::Size(1))
} else {
Done(&i[1..], i[0])
}
}
/// Parses a big-endian unsigned 16-bit integer from the first 2 bytes.
pub fn be_u16<'a>(i: &[u8]) -> IResult<'a,&[u8], u16> {
if i.len() < 2 {
Incomplete(Needed::Size(2))
} else {
let res = ((i[0] as u16) << 8) + i[1] as u16;
Done(&i[2..], res)
}
}
/// Parses a big-endian unsigned 32-bit integer from the first 4 bytes.
pub fn be_u32<'a>(i: &[u8]) -> IResult<'a,&[u8], u32> {
if i.len() < 4 {
Incomplete(Needed::Size(4))
} else {
let res = ((i[0] as u32) << 24) + ((i[1] as u32) << 16) + ((i[2] as u32) << 8) + i[3] as u32;
Done(&i[4..], res)
}
}
/// Parses a big-endian unsigned 64-bit integer from the first 8 bytes.
pub fn be_u64<'a>(i: &[u8]) -> IResult<'a,&[u8], u64> {
if i.len() < 8 {
Incomplete(Needed::Size(8))
} else {
let res = ((i[0] as u64) << 56) + ((i[1] as u64) << 48) + ((i[2] as u64) << 40) + ((i[3] as u64) << 32) +
((i[4] as u64) << 24) + ((i[5] as u64) << 16) + ((i[6] as u64) << 8) + i[7] as u64;
Done(&i[8..], res)
}
}
/// Parses a signed 8-bit integer (same bytes as `be_u8`, reinterpreted).
pub fn be_i8<'a>(i:&'a [u8]) -> IResult<&'a [u8], i8> {
map!(i, be_u8, | x | { x as i8 })
}
/// Parses a big-endian signed 16-bit integer (reinterprets `be_u16`'s result).
pub fn be_i16<'a>(i:&'a [u8]) -> IResult<&'a [u8], i16> {
map!(i, be_u16, | x | { x as i16 })
}
/// Parses a big-endian signed 32-bit integer (reinterprets `be_u32`'s result).
pub fn be_i32<'a>(i:&'a [u8]) -> IResult<&'a [u8], i32> {
map!(i, be_u32, | x | { x as i32 })
}
/// Parses a big-endian signed 64-bit integer (reinterprets `be_u64`'s result).
pub fn be_i64<'a>(i:&'a [u8]) -> IResult<&'a [u8], i64> {
map!(i, be_u64, | x | { x as i64 })
}
/// Parses an unsigned 8-bit integer (endianness is irrelevant for one byte).
pub fn le_u8(i: &[u8]) -> IResult<&[u8], u8> {
if i.len() < 1 {
Incomplete(Needed::Size(1))
} else {
Done(&i[1..], i[0])
}
}
/// Parses a little-endian unsigned 16-bit integer from the first 2 bytes.
pub fn le_u16(i: &[u8]) -> IResult<&[u8], u16> {
if i.len() < 2 {
Incomplete(Needed::Size(2))
} else {
let res = ((i[1] as u16) << 8) + i[0] as u16;
Done(&i[2..], res)
}
}
/// Parses a little-endian unsigned 32-bit integer from the first 4 bytes.
pub fn le_u32(i: &[u8]) -> IResult<&[u8], u32> {
if i.len() < 4 {
Incomplete(Needed::Size(4))
} else {
let res = ((i[3] as u32) << 24) + ((i[2] as u32) << 16) + ((i[1] as u32) << 8) + i[0] as u32;
Done(&i[4..], res)
}
}
/// Parses a little-endian unsigned 64-bit integer from the first 8 bytes.
pub fn le_u64(i: &[u8]) -> IResult<&[u8], u64> {
if i.len() < 8 {
Incomplete(Needed::Size(8))
} else {
let res = ((i[7] as u64) << 56) + ((i[6] as u64) << 48) + ((i[5] as u64) << 40) + ((i[4] as u64) << 32) +
((i[3] as u64) << 24) + ((i[2] as u64) << 16) + ((i[1] as u64) << 8) + i[0] as u64;
Done(&i[8..], res)
}
}
/// Parses a big-endian `f32`: reads 4 bytes as a `u32`, then reinterprets
/// the bits as a float via `transmute`.
pub fn be_f32(input: &[u8]) -> IResult<&[u8], f32> {
match be_u32(input) {
Error(e) => Error(e),
Incomplete(e) => Incomplete(e),
Done(i,o) => {
unsafe {
// SAFETY: u32 and f32 are both 4 bytes; this is a pure bit reinterpretation.
Done(i, transmute::<u32, f32>(o))
}
}
}
}
/// Parses a big-endian `f64`: reads 8 bytes as a `u64`, then reinterprets
/// the bits as a float via `transmute`.
pub fn be_f64(input: &[u8]) -> IResult<&[u8], f64> {
match be_u64(input) {
Error(e) => Error(e),
Incomplete(e) => Incomplete(e),
Done(i,o) => {
unsafe {
// SAFETY: u64 and f64 are both 8 bytes; this is a pure bit reinterpretation.
Done(i, transmute::<u64, f64>(o))
}
}
}
}
/// Succeeds only at end of input; errors when any bytes remain.
pub fn eof(input:&[u8]) -> IResult<&[u8], &[u8]> {
    if input.is_empty() {
        Done(input, input)
    } else {
        Error(Position(ErrorCode::Eof as u32, input))
    }
}
// Unit tests for the free-function parsers defined above.
#[cfg(test)]
mod tests {
use super::*;
use internal::Needed;
use internal::IResult::*;
use internal::Err::*;
use util::ErrorCode;
#[test]
fn tag_closure() {
let x = tag_cl(&b"abcd"[..]);
let r = x(&b"abcdabcdefgh"[..]);
assert_eq!(r, Done(&b"abcdefgh"[..], &b"abcd"[..]));
let r2 = x(&b"abcefgh"[..]);
assert_eq!(r2, Error(Position(ErrorCode::TagClosure as u32, &b"abcefgh"[..])));
}
#[test]
fn character() {
let empty: &[u8] = b"";
let a: &[u8] = b"abcd";
let b: &[u8] = b"1234";
let c: &[u8] = b"a123";
// "é" is multi-byte UTF-8, so parsing stops after the ASCII prefix "az".
let d: &[u8] = "azé12".as_bytes();
let e: &[u8] = b" ";
assert_eq!(alpha(a), Done(empty, a));
assert_eq!(alpha(b), Error(Position(ErrorCode::Alpha as u32,b)));
assert_eq!(alpha(c), Done(&c[1..], &b"a"[..]));
assert_eq!(alpha(d), Done("é12".as_bytes(), &b"az"[..]));
assert_eq!(digit(a), Error(Position(ErrorCode::Digit as u32,a)));
assert_eq!(digit(b), Done(empty, b));
assert_eq!(digit(c), Error(Position(ErrorCode::Digit as u32,c)));
assert_eq!(digit(d), Error(Position(ErrorCode::Digit as u32,d)));
assert_eq!(alphanumeric(a), Done(empty, a));
assert_eq!(alphanumeric(b), Done(empty, b));
assert_eq!(alphanumeric(c), Done(empty, c));
assert_eq!(alphanumeric(d), Done("é12".as_bytes(), &b"az"[..]));
assert_eq!(space(e), Done(&b""[..], &b" "[..]));
}
#[test]
fn is_not() {
let a: &[u8] = b"ab12cd\nefgh";
assert_eq!(not_line_ending(a), Done(&b"\nefgh"[..], &b"ab12cd"[..]));
let b: &[u8] = b"ab12cd\nefgh\nijkl";
assert_eq!(not_line_ending(b), Done(&b"\nefgh\nijkl"[..], &b"ab12cd"[..]));
let c: &[u8] = b"ab12cd";
assert_eq!(not_line_ending(c), Done(&b""[..], c));
}
#[test]
fn buffer_with_size() {
let i:Vec<u8> = vec![7,8];
let o:Vec<u8> = vec![4,5,6];
//let arr:[u8; 6usize] = [3, 4, 5, 6, 7, 8];
let arr:[u8; 6usize] = [3, 4, 5, 6, 7, 8];
let res = sized_buffer(&arr[..]);
assert_eq!(res, Done(&i[..], &o[..]))
}
/*#[test]
fn t1() {
let v1:Vec<u8> = vec![1,2,3];
let v2:Vec<u8> = vec![4,5,6];
let d = Done(&v1[..], &v2[..]);
let res = d.flat_map(print);
assert_eq!(res, Done(&v2[..], ()));
}*/
#[test]
fn length_value_test() {
let i1 = vec![7,8];
let o1 = vec![4, 5, 6];
let arr1:[u8; 6usize] = [3, 4, 5, 6, 7, 8];
let res1 = length_value(&arr1);
assert_eq!(Done(&i1[..], &o1[..]), res1);
let i2:Vec<u8> = vec![4,5,6,7,8];
let o2: &[u8] = b"";
let arr2:[u8; 6usize] = [0, 4, 5, 6, 7, 8];
let res2 = length_value(&arr2);
assert_eq!(Done(&i2[..], o2), res2);
let arr3:[u8; 7usize] = [8, 4, 5, 6, 7, 8, 9];
let res3 = length_value(&arr3);
// NOTE(review): the old "should be incomplete" FIXME is resolved — the
// result asserted below already is Incomplete.
assert_eq!(Incomplete(Needed::Size(9)), res3);
}
#[test]
fn i8_tests() {
assert_eq!(be_i8(&[0x00]), Done(&b""[..], 0));
assert_eq!(be_i8(&[0x7f]), Done(&b""[..], 127));
assert_eq!(be_i8(&[0xff]), Done(&b""[..], -1));
assert_eq!(be_i8(&[0x80]), Done(&b""[..], -128));
}
#[test]
fn i16_tests() {
assert_eq!(be_i16(&[0x00, 0x00]), Done(&b""[..], 0));
assert_eq!(be_i16(&[0x7f, 0xff]), Done(&b""[..], 32767_i16));
assert_eq!(be_i16(&[0xff, 0xff]), Done(&b""[..], -1));
assert_eq!(be_i16(&[0x80, 0x00]), Done(&b""[..], -32768_i16));
}
#[test]
fn i32_tests() {
assert_eq!(be_i32(&[0x00, 0x00, 0x00, 0x00]), Done(&b""[..], 0));
assert_eq!(be_i32(&[0x7f, 0xff, 0xff, 0xff]), Done(&b""[..], 2147483647_i32));
assert_eq!(be_i32(&[0xff, 0xff, 0xff, 0xff]), Done(&b""[..], -1));
assert_eq!(be_i32(&[0x80, 0x00, 0x00, 0x00]), Done(&b""[..], -2147483648_i32));
}
#[test]
fn i64_tests() {
assert_eq!(be_i64(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]), Done(&b""[..], 0));
assert_eq!(be_i64(&[0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]), Done(&b""[..], 9223372036854775807_i64));
assert_eq!(be_i64(&[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]), Done(&b""[..], -1));
assert_eq!(be_i64(&[0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]), Done(&b""[..], -9223372036854775808_i64));
}
#[test]
fn end_of_input() {
let not_over = &b"Hello, world!"[..];
let is_over = &b""[..];
let res_not_over = eof(not_over);
assert_eq!(res_not_over, Error(Position(ErrorCode::Eof as u32, not_over)));
let res_over = eof(is_over);
assert_eq!(res_over, Done(is_over, is_over));
}
}
// Add le_i* functions and tests
//! Useful parser combinators
//!
//! A number of useful parser combinators have already been implemented.
//! Some of them use macros, other are implemented through functions.
//! Hopefully, the syntax will converge to only one way in the future,
//! but the macros system makes no promises.
//!
#[cfg(feature = "core")]
use std::prelude::v1::*;
use std::boxed::Box;
use std::fmt::Debug;
use internal::*;
use internal::IResult::*;
use internal::Err::*;
use util::ErrorCode;
use std::mem::transmute;
/// Returns a closure that recognizes the byte sequence `rec` at the start of
/// its input, producing the matched prefix and the remaining bytes, or an
/// error when the input does not start with `rec`.
pub fn tag_cl<'a,'b>(rec:&'a[u8]) -> Box<Fn(&'b[u8]) -> IResult<'b, &'b[u8], &'b[u8]> + 'a> {
Box::new(move |i: &'b[u8]| -> IResult<'b, &'b[u8], &'b[u8]> {
if i.len() >= rec.len() && &i[0..rec.len()] == rec {
// Tag matched: split the input right after it.
Done(&i[rec.len()..], &i[0..rec.len()])
} else {
Error(Position(ErrorCode::TagClosure as u32, i))
}
})
}
/// Debug helper: prints the value and returns it unchanged as remaining input.
/// Only built when the `core` (no_std) feature is disabled, since it needs `println!`.
#[cfg(not(feature = "core"))]
pub fn print<'a,T: Debug>(input: T) -> IResult<'a,T, ()> {
println!("{:?}", input);
Done(input, ())
}
/// Entry-point parser: consumes nothing (`()` remains) and yields the whole
/// input as its output value.
pub fn begin<'a>(input: &'a [u8]) -> IResult<'a,(), &'a [u8]> {
Done((), input)
}
// FIXME: when rust-lang/rust#17436 is fixed, macros will be able to export
// public methods
//pub is_not!(line_ending b"\r\n")
/// Consumes bytes up to (but not including) the first `\r` or `\n`.
///
/// When the input contains no line ending, the whole input is consumed.
pub fn not_line_ending<'a>(input:&[u8]) -> IResult<'a, &[u8], &[u8]> {
    match input.iter().position(|&byte| byte == b'\r' || byte == b'\n') {
        Some(end) => Done(&input[end..], &input[0..end]),
        None      => Done(b"", input)
    }
}
// Parser consuming a single "\n", generated by the `named!` macro (used by `line_ending`).
named!(tag_ln, tag!("\n"));
/// Recognizes a line ending by delegating to the `tag_ln` ("\n") parser.
pub fn line_ending<'a>(input:&'a[u8]) -> IResult<'a, &[u8], &[u8]> {
tag_ln(input)
}
/// True for ASCII letters `A`-`Z` and `a`-`z`.
pub fn is_alphabetic(chr:u8) -> bool {
    (chr >= b'A' && chr <= b'Z') || (chr >= b'a' && chr <= b'z')
}
/// True for ASCII digits `0`-`9`.
pub fn is_digit(chr: u8) -> bool {
    chr >= b'0' && chr <= b'9'
}
/// True for ASCII letters or digits.
pub fn is_alphanumeric(chr: u8) -> bool {
    is_digit(chr) || is_alphabetic(chr)
}
/// True for a space or horizontal tab byte.
pub fn is_space(chr:u8) -> bool {
    chr == b' ' || chr == b'\t'
}
// FIXME: when rust-lang/rust#17436 is fixed, macros will be able to export
//pub filter!(alpha is_alphabetic)
//pub filter!(digit is_digit)
//pub filter!(alphanumeric is_alphanumeric)
pub fn alpha<'a>(input:&'a [u8]) -> IResult<'a,&'a [u8], &[u8]> {
for idx in 0..input.len() {
if !is_alphabetic(input[idx]) {
if idx == 0 {
return Error(Position(ErrorCode::Alpha as u32, input))
} else {
return Done(&input[idx..], &input[0..idx])
}
}
}
Done(b"", input)
}
pub fn digit<'a>(input:&'a [u8]) -> IResult<'a,&'a [u8], &[u8]> {
for idx in 0..input.len() {
if !is_digit(input[idx]) {
if idx == 0 {
return Error(Position(ErrorCode::Digit as u32, input))
} else {
return Done(&input[idx..], &input[0..idx])
}
}
}
Done(b"", input)
}
pub fn alphanumeric<'a>(input:&'a [u8]) -> IResult<'a,&'a [u8], &[u8]> {
for idx in 0..input.len() {
if !is_alphanumeric(input[idx]) {
if idx == 0 {
return Error(Position(ErrorCode::AlphaNumeric as u32, input))
} else {
return Done(&input[idx..], &input[0..idx])
}
}
}
Done(b"", input)
}
pub fn space<'a>(input:&'a [u8]) -> IResult<'a,&'a [u8], &[u8]> {
for idx in 0..input.len() {
if !is_space(input[idx]) {
if idx == 0 {
return Error(Position(ErrorCode::Space as u32, input))
} else {
return Done(&input[idx..], &input[0..idx])
}
}
}
Done(b"", input)
}
pub fn multispace<'a>(input:&'a [u8]) -> IResult<'a,&'a [u8], &[u8]> {
for idx in 0..input.len() {
// println!("multispace at index: {}", idx);
if !is_space(input[idx]) && input[idx] != '\r' as u8 && input[idx] != '\n' as u8 {
if idx == 0 {
return Error(Position(ErrorCode::MultiSpace as u32, input))
} else {
return Done(&input[idx..], &input[0..idx])
}
}
}
Done(b"", input)
}
/// Parses a length-prefixed buffer: the first byte is the payload size.
///
/// Returns `Incomplete` when the input is empty or shorter than the
/// announced payload; otherwise yields the payload and the remaining bytes.
pub fn sized_buffer<'a>(input:&'a [u8]) -> IResult<'a,&'a [u8], &[u8]> {
    match input.first() {
        None => Incomplete(Needed::Unknown),
        Some(&length_byte) => {
            let len = length_byte as usize;
            if input.len() > len {
                // Payload occupies bytes [1, len]; the rest follows it.
                Done(&input[len+1..], &input[1..len+1])
            } else {
                Incomplete(Needed::Size(1 + len))
            }
        }
    }
}
pub fn length_value<'a>(input:&'a [u8]) -> IResult<'a,&'a [u8], &[u8]> {
let input_len = input.len();
if input_len == 0 {
return Error(Position(ErrorCode::LengthValueFn as u32, input))
}
let len = input[0] as usize;
if input_len - 1 >= len {
return IResult::Done(&input[len+1..], &input[1..len+1])
} else {
// FIXME: return Incomplete
return IResult::Incomplete(Needed::Size(1+len))
}
}
/// Parses an unsigned 8-bit integer from the first byte of the input.
pub fn be_u8<'a>(i: &[u8]) -> IResult<'a,&[u8], u8> {
if i.len() < 1 {
Incomplete(Needed::Size(1))
} else {
Done(&i[1..], i[0])
}
}
/// Parses a big-endian unsigned 16-bit integer from the first 2 bytes.
pub fn be_u16<'a>(i: &[u8]) -> IResult<'a,&[u8], u16> {
if i.len() < 2 {
Incomplete(Needed::Size(2))
} else {
let res = ((i[0] as u16) << 8) + i[1] as u16;
Done(&i[2..], res)
}
}
/// Parses a big-endian unsigned 32-bit integer from the first 4 bytes.
pub fn be_u32<'a>(i: &[u8]) -> IResult<'a,&[u8], u32> {
if i.len() < 4 {
Incomplete(Needed::Size(4))
} else {
let res = ((i[0] as u32) << 24) + ((i[1] as u32) << 16) + ((i[2] as u32) << 8) + i[3] as u32;
Done(&i[4..], res)
}
}
/// Parses a big-endian unsigned 64-bit integer from the first 8 bytes.
pub fn be_u64<'a>(i: &[u8]) -> IResult<'a,&[u8], u64> {
if i.len() < 8 {
Incomplete(Needed::Size(8))
} else {
let res = ((i[0] as u64) << 56) + ((i[1] as u64) << 48) + ((i[2] as u64) << 40) + ((i[3] as u64) << 32) +
((i[4] as u64) << 24) + ((i[5] as u64) << 16) + ((i[6] as u64) << 8) + i[7] as u64;
Done(&i[8..], res)
}
}
/// Parses a signed 8-bit integer (same bytes as `be_u8`, reinterpreted).
pub fn be_i8<'a>(i:&'a [u8]) -> IResult<&'a [u8], i8> {
map!(i, be_u8, | x | { x as i8 })
}
/// Parses a big-endian signed 16-bit integer (reinterprets `be_u16`'s result).
pub fn be_i16<'a>(i:&'a [u8]) -> IResult<&'a [u8], i16> {
map!(i, be_u16, | x | { x as i16 })
}
/// Parses a big-endian signed 32-bit integer (reinterprets `be_u32`'s result).
pub fn be_i32<'a>(i:&'a [u8]) -> IResult<&'a [u8], i32> {
map!(i, be_u32, | x | { x as i32 })
}
/// Parses a big-endian signed 64-bit integer (reinterprets `be_u64`'s result).
pub fn be_i64<'a>(i:&'a [u8]) -> IResult<&'a [u8], i64> {
map!(i, be_u64, | x | { x as i64 })
}
/// Parses an unsigned 8-bit integer (endianness is irrelevant for one byte).
pub fn le_u8(i: &[u8]) -> IResult<&[u8], u8> {
if i.len() < 1 {
Incomplete(Needed::Size(1))
} else {
Done(&i[1..], i[0])
}
}
/// Parses a little-endian unsigned 16-bit integer from the first 2 bytes.
pub fn le_u16(i: &[u8]) -> IResult<&[u8], u16> {
if i.len() < 2 {
Incomplete(Needed::Size(2))
} else {
let res = ((i[1] as u16) << 8) + i[0] as u16;
Done(&i[2..], res)
}
}
/// Parses a little-endian unsigned 32-bit integer from the first 4 bytes.
pub fn le_u32(i: &[u8]) -> IResult<&[u8], u32> {
if i.len() < 4 {
Incomplete(Needed::Size(4))
} else {
let res = ((i[3] as u32) << 24) + ((i[2] as u32) << 16) + ((i[1] as u32) << 8) + i[0] as u32;
Done(&i[4..], res)
}
}
/// Parses a little-endian unsigned 64-bit integer from the first 8 bytes.
pub fn le_u64(i: &[u8]) -> IResult<&[u8], u64> {
if i.len() < 8 {
Incomplete(Needed::Size(8))
} else {
let res = ((i[7] as u64) << 56) + ((i[6] as u64) << 48) + ((i[5] as u64) << 40) + ((i[4] as u64) << 32) +
((i[3] as u64) << 24) + ((i[2] as u64) << 16) + ((i[1] as u64) << 8) + i[0] as u64;
Done(&i[8..], res)
}
}
/// Parses a signed 8-bit integer (same bytes as `le_u8`, reinterpreted).
pub fn le_i8<'a>(i:&'a [u8]) -> IResult<&'a [u8], i8> {
map!(i, le_u8, | x | { x as i8 })
}
/// Parses a little-endian signed 16-bit integer (reinterprets `le_u16`'s result).
pub fn le_i16<'a>(i:&'a [u8]) -> IResult<&'a [u8], i16> {
map!(i, le_u16, | x | { x as i16 })
}
/// Parses a little-endian signed 32-bit integer (reinterprets `le_u32`'s result).
pub fn le_i32<'a>(i:&'a [u8]) -> IResult<&'a [u8], i32> {
map!(i, le_u32, | x | { x as i32 })
}
/// Parses a little-endian signed 64-bit integer (reinterprets `le_u64`'s result).
pub fn le_i64<'a>(i:&'a [u8]) -> IResult<&'a [u8], i64> {
map!(i, le_u64, | x | { x as i64 })
}
/// Parses a big-endian `f32`: reads 4 bytes as a `u32`, then reinterprets
/// the bits as a float via `transmute`.
pub fn be_f32(input: &[u8]) -> IResult<&[u8], f32> {
match be_u32(input) {
Error(e) => Error(e),
Incomplete(e) => Incomplete(e),
Done(i,o) => {
unsafe {
// SAFETY: u32 and f32 are both 4 bytes; this is a pure bit reinterpretation.
Done(i, transmute::<u32, f32>(o))
}
}
}
}
/// Parses a big-endian `f64`: reads 8 bytes as a `u64`, then reinterprets
/// the bits as a float via `transmute`.
pub fn be_f64(input: &[u8]) -> IResult<&[u8], f64> {
match be_u64(input) {
Error(e) => Error(e),
Incomplete(e) => Incomplete(e),
Done(i,o) => {
unsafe {
// SAFETY: u64 and f64 are both 8 bytes; this is a pure bit reinterpretation.
Done(i, transmute::<u64, f64>(o))
}
}
}
}
/// Succeeds only at end of input; errors when any bytes remain.
pub fn eof(input:&[u8]) -> IResult<&[u8], &[u8]> {
    if input.is_empty() {
        Done(input, input)
    } else {
        Error(Position(ErrorCode::Eof as u32, input))
    }
}
// Unit tests for the free-function parsers defined above.
#[cfg(test)]
mod tests {
use super::*;
use internal::Needed;
use internal::IResult::*;
use internal::Err::*;
use util::ErrorCode;
#[test]
fn tag_closure() {
let x = tag_cl(&b"abcd"[..]);
let r = x(&b"abcdabcdefgh"[..]);
assert_eq!(r, Done(&b"abcdefgh"[..], &b"abcd"[..]));
let r2 = x(&b"abcefgh"[..]);
assert_eq!(r2, Error(Position(ErrorCode::TagClosure as u32, &b"abcefgh"[..])));
}
#[test]
fn character() {
let empty: &[u8] = b"";
let a: &[u8] = b"abcd";
let b: &[u8] = b"1234";
let c: &[u8] = b"a123";
// "é" is multi-byte UTF-8, so parsing stops after the ASCII prefix "az".
let d: &[u8] = "azé12".as_bytes();
let e: &[u8] = b" ";
assert_eq!(alpha(a), Done(empty, a));
assert_eq!(alpha(b), Error(Position(ErrorCode::Alpha as u32,b)));
assert_eq!(alpha(c), Done(&c[1..], &b"a"[..]));
assert_eq!(alpha(d), Done("é12".as_bytes(), &b"az"[..]));
assert_eq!(digit(a), Error(Position(ErrorCode::Digit as u32,a)));
assert_eq!(digit(b), Done(empty, b));
assert_eq!(digit(c), Error(Position(ErrorCode::Digit as u32,c)));
assert_eq!(digit(d), Error(Position(ErrorCode::Digit as u32,d)));
assert_eq!(alphanumeric(a), Done(empty, a));
assert_eq!(alphanumeric(b), Done(empty, b));
assert_eq!(alphanumeric(c), Done(empty, c));
assert_eq!(alphanumeric(d), Done("é12".as_bytes(), &b"az"[..]));
assert_eq!(space(e), Done(&b""[..], &b" "[..]));
}
#[test]
fn is_not() {
let a: &[u8] = b"ab12cd\nefgh";
assert_eq!(not_line_ending(a), Done(&b"\nefgh"[..], &b"ab12cd"[..]));
let b: &[u8] = b"ab12cd\nefgh\nijkl";
assert_eq!(not_line_ending(b), Done(&b"\nefgh\nijkl"[..], &b"ab12cd"[..]));
let c: &[u8] = b"ab12cd";
assert_eq!(not_line_ending(c), Done(&b""[..], c));
}
#[test]
fn buffer_with_size() {
let i:Vec<u8> = vec![7,8];
let o:Vec<u8> = vec![4,5,6];
//let arr:[u8; 6usize] = [3, 4, 5, 6, 7, 8];
let arr:[u8; 6usize] = [3, 4, 5, 6, 7, 8];
let res = sized_buffer(&arr[..]);
assert_eq!(res, Done(&i[..], &o[..]))
}
/*#[test]
fn t1() {
let v1:Vec<u8> = vec![1,2,3];
let v2:Vec<u8> = vec![4,5,6];
let d = Done(&v1[..], &v2[..]);
let res = d.flat_map(print);
assert_eq!(res, Done(&v2[..], ()));
}*/
#[test]
fn length_value_test() {
let i1 = vec![7,8];
let o1 = vec![4, 5, 6];
let arr1:[u8; 6usize] = [3, 4, 5, 6, 7, 8];
let res1 = length_value(&arr1);
assert_eq!(Done(&i1[..], &o1[..]), res1);
let i2:Vec<u8> = vec![4,5,6,7,8];
let o2: &[u8] = b"";
let arr2:[u8; 6usize] = [0, 4, 5, 6, 7, 8];
let res2 = length_value(&arr2);
assert_eq!(Done(&i2[..], o2), res2);
let arr3:[u8; 7usize] = [8, 4, 5, 6, 7, 8, 9];
let res3 = length_value(&arr3);
// NOTE(review): the old "should be incomplete" FIXME is resolved — the
// result asserted below already is Incomplete.
assert_eq!(Incomplete(Needed::Size(9)), res3);
}
#[test]
fn i8_tests() {
assert_eq!(be_i8(&[0x00]), Done(&b""[..], 0));
assert_eq!(be_i8(&[0x7f]), Done(&b""[..], 127));
assert_eq!(be_i8(&[0xff]), Done(&b""[..], -1));
assert_eq!(be_i8(&[0x80]), Done(&b""[..], -128));
}
#[test]
fn i16_tests() {
assert_eq!(be_i16(&[0x00, 0x00]), Done(&b""[..], 0));
assert_eq!(be_i16(&[0x7f, 0xff]), Done(&b""[..], 32767_i16));
assert_eq!(be_i16(&[0xff, 0xff]), Done(&b""[..], -1));
assert_eq!(be_i16(&[0x80, 0x00]), Done(&b""[..], -32768_i16));
}
#[test]
fn i32_tests() {
assert_eq!(be_i32(&[0x00, 0x00, 0x00, 0x00]), Done(&b""[..], 0));
assert_eq!(be_i32(&[0x7f, 0xff, 0xff, 0xff]), Done(&b""[..], 2147483647_i32));
assert_eq!(be_i32(&[0xff, 0xff, 0xff, 0xff]), Done(&b""[..], -1));
assert_eq!(be_i32(&[0x80, 0x00, 0x00, 0x00]), Done(&b""[..], -2147483648_i32));
}
#[test]
fn i64_tests() {
assert_eq!(be_i64(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]), Done(&b""[..], 0));
assert_eq!(be_i64(&[0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]), Done(&b""[..], 9223372036854775807_i64));
assert_eq!(be_i64(&[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]), Done(&b""[..], -1));
assert_eq!(be_i64(&[0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]), Done(&b""[..], -9223372036854775808_i64));
}
#[test]
fn le_i8_tests() {
assert_eq!(le_i8(&[0x00]), Done(&b""[..], 0));
assert_eq!(le_i8(&[0x7f]), Done(&b""[..], 127));
assert_eq!(le_i8(&[0xff]), Done(&b""[..], -1));
assert_eq!(le_i8(&[0x80]), Done(&b""[..], -128));
}
#[test]
fn le_i16_tests() {
assert_eq!(le_i16(&[0x00, 0x00]), Done(&b""[..], 0));
assert_eq!(le_i16(&[0xff, 0x7f]), Done(&b""[..], 32767_i16));
assert_eq!(le_i16(&[0xff, 0xff]), Done(&b""[..], -1));
assert_eq!(le_i16(&[0x00, 0x80]), Done(&b""[..], -32768_i16));
}
#[test]
fn le_i32_tests() {
assert_eq!(le_i32(&[0x00, 0x00, 0x00, 0x00]), Done(&b""[..], 0));
assert_eq!(le_i32(&[0xff, 0xff, 0xff, 0x7f]), Done(&b""[..], 2147483647_i32));
assert_eq!(le_i32(&[0xff, 0xff, 0xff, 0xff]), Done(&b""[..], -1));
assert_eq!(le_i32(&[0x00, 0x00, 0x00, 0x80]), Done(&b""[..], -2147483648_i32));
}
#[test]
fn le_i64_tests() {
assert_eq!(le_i64(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]), Done(&b""[..], 0));
assert_eq!(le_i64(&[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f]), Done(&b""[..], 9223372036854775807_i64));
assert_eq!(le_i64(&[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]), Done(&b""[..], -1));
assert_eq!(le_i64(&[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80]), Done(&b""[..], -9223372036854775808_i64));
}
#[test]
fn end_of_input() {
let not_over = &b"Hello, world!"[..];
let is_over = &b""[..];
let res_not_over = eof(not_over);
assert_eq!(res_not_over, Error(Position(ErrorCode::Eof as u32, not_over)));
let res_over = eof(is_over);
assert_eq!(res_over, Done(is_over, is_over));
}
}
// |
// Subsets and Splits
// No community queries yet
// The top public SQL queries from the community will appear here once available.